[ 474.302954] env[63241]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63241) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 474.303304] env[63241]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63241) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 474.303482] env[63241]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63241) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 474.303808] env[63241]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 474.397161] env[63241]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63241) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 474.408892] env[63241]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.012s {{(pid=63241) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 475.010240] env[63241]: INFO nova.virt.driver [None req-1c4cd19f-923d-49be-92a1-ea5f614c0b6d None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 475.080840] env[63241]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 475.080988] env[63241]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 475.081107] env[63241]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63241) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 478.439439] env[63241]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-84daeb4a-cfdf-44a5-b676-a84cce4d605e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 478.455519] env[63241]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63241) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 478.455646] env[63241]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-1a2ec4ba-792e-4098-8b8f-523203c124f8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 478.495793] env[63241]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 4d7d5.
[ 478.495918] env[63241]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.415s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 478.496470] env[63241]: INFO nova.virt.vmwareapi.driver [None req-1c4cd19f-923d-49be-92a1-ea5f614c0b6d None None] VMware vCenter version: 7.0.3
[ 478.499982] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2fc926-3ae1-4e06-b809-07345ede3a5a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 478.517483] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e8a252-de92-4bd0-93c2-bef35f468112 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 478.523412] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0a0401-f441-483e-9d09-7a8e6c7db184 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 478.529835] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e66094-9804-4ec3-8661-7663726738ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 478.542842] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b44d55-1ff9-4569-9900-928b7bb717c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 478.548646] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361cfff7-1861-421a-80c0-6c9834d6f694 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 478.579236] env[63241]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-58d34a9d-b1c2-45b2-85a0-cbe046966530 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 478.584523] env[63241]: DEBUG nova.virt.vmwareapi.driver [None req-1c4cd19f-923d-49be-92a1-ea5f614c0b6d None None] Extension org.openstack.compute already exists. {{(pid=63241) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 478.587219] env[63241]: INFO nova.compute.provider_config [None req-1c4cd19f-923d-49be-92a1-ea5f614c0b6d None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 479.090532] env[63241]: DEBUG nova.context [None req-1c4cd19f-923d-49be-92a1-ea5f614c0b6d None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),05ad17b7-9ff0-49fa-b74b-9f6292059db3(cell1) {{(pid=63241) load_cells /opt/stack/nova/nova/context.py:464}}
[ 479.092865] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 479.093133] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 479.093827] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 479.094300] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Acquiring lock "05ad17b7-9ff0-49fa-b74b-9f6292059db3" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 479.094495] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Lock "05ad17b7-9ff0-49fa-b74b-9f6292059db3" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 479.095543] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Lock "05ad17b7-9ff0-49fa-b74b-9f6292059db3" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 479.116726] env[63241]: INFO dbcounter [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Registered counter for database nova_cell0
[ 479.124823] env[63241]: INFO dbcounter [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Registered counter for database nova_cell1
[ 479.127957] env[63241]: DEBUG oslo_db.sqlalchemy.engines [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63241) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 479.128325] env[63241]: DEBUG oslo_db.sqlalchemy.engines [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63241) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 479.133072] env[63241]: ERROR nova.db.main.api [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 479.133072] env[63241]: result = function(*args, **kwargs)
[ 479.133072] env[63241]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 479.133072] env[63241]: return func(*args, **kwargs)
[ 479.133072] env[63241]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 479.133072] env[63241]: result = fn(*args, **kwargs)
[ 479.133072] env[63241]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 479.133072] env[63241]: return f(*args, **kwargs)
[ 479.133072] env[63241]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 479.133072] env[63241]: return db.service_get_minimum_version(context, binaries)
[ 479.133072] env[63241]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 479.133072] env[63241]: _check_db_access()
[ 479.133072] env[63241]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 479.133072] env[63241]: stacktrace = ''.join(traceback.format_stack())
[ 479.133072] env[63241]:
[ 479.134050] env[63241]: ERROR nova.db.main.api [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 479.134050] env[63241]: result = function(*args, **kwargs)
[ 479.134050] env[63241]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 479.134050] env[63241]: return func(*args, **kwargs)
[ 479.134050] env[63241]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 479.134050] env[63241]: result = fn(*args, **kwargs)
[ 479.134050] env[63241]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 479.134050] env[63241]: return f(*args, **kwargs)
[ 479.134050] env[63241]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 479.134050] env[63241]: return db.service_get_minimum_version(context, binaries)
[ 479.134050] env[63241]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 479.134050] env[63241]: _check_db_access()
[ 479.134050] env[63241]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 479.134050] env[63241]: stacktrace = ''.join(traceback.format_stack())
[ 479.134050] env[63241]:
[ 479.134431] env[63241]: WARNING nova.objects.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Failed to get minimum service version for cell 05ad17b7-9ff0-49fa-b74b-9f6292059db3
[ 479.134589] env[63241]: WARNING nova.objects.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 479.135014] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Acquiring lock "singleton_lock" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 479.135186] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Acquired lock "singleton_lock" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 479.135420] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Releasing lock "singleton_lock" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 479.135731] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Full set of CONF: {{(pid=63241) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 479.135872] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ******************************************************************************** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 479.135998] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Configuration options gathered from: {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 479.136152] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 479.136337] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 479.136464] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ================================================================================ {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 479.136672] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] allow_resize_to_same_host = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.136838] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] arq_binding_timeout = 300 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.136969] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] backdoor_port = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.137109] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] backdoor_socket = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.137271] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] block_device_allocate_retries = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.137429] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] block_device_allocate_retries_interval = 3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.137594] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cert = self.pem {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.137752] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.137919] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute_monitors = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.138098] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] config_dir = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.138269] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] config_drive_format = iso9660 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.138400] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.138560] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] config_source = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.138724] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] console_host = devstack {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.138884] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] control_exchange = nova {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.139060] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cpu_allocation_ratio = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.139206] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] daemon = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.139371] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] debug = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.139523] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] default_access_ip_network_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.139683] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] default_availability_zone = nova {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.139833] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] default_ephemeral_format = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.139986] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] default_green_pool_size = 1000 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.140242] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.140403] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] default_schedule_zone = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.140557] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] disk_allocation_ratio = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.140714] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] enable_new_services = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.140885] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] enabled_apis = ['osapi_compute'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.141057] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] enabled_ssl_apis = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.141219] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] flat_injected = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.141373] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] force_config_drive = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.141527] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] force_raw_images = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.141690] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] graceful_shutdown_timeout = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.141845] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] heal_instance_info_cache_interval = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.142096] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] host = cpu-1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.142348] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.142553] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] initial_disk_allocation_ratio = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.142720] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] initial_ram_allocation_ratio = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.142933] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.143118] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] instance_build_timeout = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.143283] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] instance_delete_interval = 300 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.143448] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] instance_format = [instance: %(uuid)s] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.143614] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] instance_name_template = instance-%08x {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.143774] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] instance_usage_audit = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.143944] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] instance_usage_audit_period = month {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.144126] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.144294] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] instances_path = /opt/stack/data/nova/instances {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.144456] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] internal_service_availability_zone = internal {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.144609] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] key = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.144762] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] live_migration_retry_count = 30 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.144925] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] log_color = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.145097] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] log_config_append = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.145263] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.145419] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] log_dir = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.145576] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] log_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.145703] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] log_options = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.145859] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] log_rotate_interval = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.146033] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] log_rotate_interval_type = days {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.146202] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] log_rotation_type = none {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.146327] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.146449] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.146611] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.146769] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.146894] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.147064] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] long_rpc_timeout = 1800 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.147224] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] max_concurrent_builds = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.147379] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] max_concurrent_live_migrations = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.147536] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] max_concurrent_snapshots = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.147692] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] max_local_block_devices = 3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.147845] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] max_logfile_count = 30 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.147997] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] max_logfile_size_mb = 200 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.148166] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] maximum_instance_delete_attempts = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.148331] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] metadata_listen = 0.0.0.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.148495] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] metadata_listen_port = 8775 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.148662] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] metadata_workers = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.148821] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] migrate_max_retries = -1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.148986] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] mkisofs_cmd = genisoimage {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.149207] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] my_block_storage_ip = 10.180.1.21 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.149339] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] my_ip = 10.180.1.21 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.149500] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] network_allocate_retries = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.149675] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.149840] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] osapi_compute_listen = 0.0.0.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.150013] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] osapi_compute_listen_port = 8774 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.150184] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] osapi_compute_unique_server_name_scope = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.150348] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] osapi_compute_workers = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.150510] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] password_length = 12 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.150670] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] periodic_enable = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.150830] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] periodic_fuzzy_delay = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.150996] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] pointer_model = usbtablet {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.151180] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] preallocate_images = none {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.151341] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] publish_errors = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.151468] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] pybasedir = /opt/stack/nova {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.151624] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ram_allocation_ratio = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.151781] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] rate_limit_burst = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.151965] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] rate_limit_except_level = CRITICAL {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.152154] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] rate_limit_interval = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.152326] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] reboot_timeout = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.152487] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] reclaim_instance_interval = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.152641] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] record = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.152805] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] reimage_timeout_per_gb = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.152989] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] report_interval = 120 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.153176] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] rescue_timeout = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.153337] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] reserved_host_cpus = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.153497] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] reserved_host_disk_mb = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.153653] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] reserved_host_memory_mb = 512 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.153808] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] reserved_huge_pages = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.153964] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] resize_confirm_window = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.154135] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] resize_fs_using_block_device = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.154295] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] resume_guests_state_on_host_boot = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.154459] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.154618] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] rpc_response_timeout = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.154777] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] run_external_periodic_tasks = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.154944] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] running_deleted_instance_action = reap {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.155122] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] running_deleted_instance_poll_interval = 1800 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.155281] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] running_deleted_instance_timeout = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.155436] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler_instance_sync_interval = 120 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.155602] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_down_time = 720 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.155767] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] servicegroup_driver = db {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.155922] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] shell_completion = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.156095] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] shelved_offload_time = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.156256] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] shelved_poll_interval = 3600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.156423] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] shutdown_timeout = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.156583] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] source_is_ipv6 = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.156740] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ssl_only = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.156985] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.157169] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] sync_power_state_interval = 600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.157330] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] sync_power_state_pool_size = 1000 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.157496] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] syslog_log_facility = LOG_USER {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.157649] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] tempdir = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.157806] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] timeout_nbd = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.157972] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] transport_url = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.158145] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] update_resources_interval = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.158305] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] use_cow_images = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.158460] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] use_eventlog = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.158614] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] use_journal = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.158767] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] use_json = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.158920] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] use_rootwrap_daemon = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.159085] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] use_stderr = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.159272] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] use_syslog = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.159386] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vcpu_pin_set = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.159548] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plugging_is_fatal = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.159710] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plugging_timeout = 300 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.159871] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] virt_mkfs = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.160048] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] volume_usage_poll_interval = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.160214] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] watch_log_file = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.160378] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] web = /usr/share/spice-html5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 479.160556] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.160718] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.160875] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.161051] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_concurrency.disable_process_locking = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.161591] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.161785] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.161978] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.162187] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.162363] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.162532] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.162715] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.auth_strategy = keystone {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.162883] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.compute_link_prefix = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.163131] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.163320] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.dhcp_domain = novalocal {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.163489] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.enable_instance_password = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.163651] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.glance_link_prefix = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.163813] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.163980] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.164155] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.instance_list_per_project_cells = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.164318] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.list_records_by_skipping_down_cells = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.164474] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.local_metadata_per_cell = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.164639] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.max_limit = 1000 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.164807] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.metadata_cache_expiration = 15 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.164982] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.neutron_default_tenant_id = default {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.165163] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.response_validation = warn {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.165330] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.use_neutron_default_nets = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.165497] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.165656] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.165821] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.165994] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.166181] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.vendordata_dynamic_targets = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.166347] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.vendordata_jsonfile_path = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.166528] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.166720] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.backend = dogpile.cache.memcached {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.166889] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.backend_argument = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.167068] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.backend_expiration_time = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.167243] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.config_prefix = cache.oslo {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.167415] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.dead_timeout = 60.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.167580] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.debug_cache_backend = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.167746] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.enable_retry_client = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.167909] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.enable_socket_keepalive = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.168096] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.enabled = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.168266] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.enforce_fips_mode = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.168429] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.expiration_time = 600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.168592] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.hashclient_retry_attempts = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.168754] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.hashclient_retry_delay = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.168916] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_dead_retry = 300 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.169089] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_password = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.169254] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.169416] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.169579] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_pool_maxsize = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.169736] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.169900] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_sasl_enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.170090] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.170259] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_socket_timeout = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.170418] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.memcache_username = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 479.170582] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.proxies = [] {{(pid=63241) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.170743] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.redis_db = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.170902] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.redis_password = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.171083] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.redis_sentinel_service_name = mymaster {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.171261] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.171429] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.redis_server = localhost:6379 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.171589] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.redis_socket_timeout = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.171745] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.redis_username = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.171907] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.retry_attempts = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.172107] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.retry_delay = 0.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.172277] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.socket_keepalive_count = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.172439] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.socket_keepalive_idle = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.172598] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.socket_keepalive_interval = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.172753] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.tls_allowed_ciphers = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.172916] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.tls_cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.173114] 
env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.tls_certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.173281] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.tls_enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.173438] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cache.tls_keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.173606] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.auth_section = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.173778] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.auth_type = password {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.173937] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.174127] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.catalog_info = volumev3::publicURL {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.174289] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.174451] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.174610] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.cross_az_attach = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.174768] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.debug = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.174924] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.endpoint_template = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.175097] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.http_retries = 3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.175261] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.175415] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.keyfile = None {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.175586] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.os_region_name = RegionOne {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.175751] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.175908] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cinder.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.176089] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.176251] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.cpu_dedicated_set = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.176409] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.cpu_shared_set = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.176573] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.image_type_exclude_list = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.176733] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.176892] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.max_concurrent_disk_ops = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.177064] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.max_disk_devices_to_attach = -1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.177230] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.177403] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.177565] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.resource_provider_association_refresh = 300 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.177724] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.177883] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.shutdown_retry_interval = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.178074] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.178258] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] conductor.workers = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.178433] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] console.allowed_origins = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.178591] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] console.ssl_ciphers = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.178757] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] console.ssl_minimum_version = default {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.178928] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] consoleauth.enforce_session_timeout = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.179101] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] consoleauth.token_ttl = 600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.179271] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.179432] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.179589] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.179745] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.connect_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.179900] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.connect_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.180066] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.endpoint_override = None 
{{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.180234] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.180389] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.180545] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.max_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.180701] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.min_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.180856] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.region_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.181018] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.retriable_status_codes = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.181176] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.service_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.181348] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.service_type = accelerator {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.181508] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.181664] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.status_code_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.181819] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.status_code_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.181994] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.182198] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.182361] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] cyborg.version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
479.182538] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.backend = sqlalchemy {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.182708] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.connection = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.182872] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.connection_debug = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.183079] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.connection_parameters = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.183255] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.connection_recycle_time = 3600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.183417] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.connection_trace = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.183578] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.db_inc_retry_interval = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.183740] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.db_max_retries = 20 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.183900] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.db_max_retry_interval = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.184072] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.db_retry_interval = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.184238] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.max_overflow = 50 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.184393] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.max_pool_size = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.184550] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.max_retries = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.184715] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.184870] env[63241]: DEBUG oslo_service.service [None 
req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.mysql_wsrep_sync_wait = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.185033] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.pool_timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.185199] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.retry_interval = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.185354] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.slave_connection = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.185512] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.sqlite_synchronous = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.185671] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] database.use_db_reconnect = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.185848] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.backend = sqlalchemy {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.186026] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.connection = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.186194] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.connection_debug = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.186361] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.connection_parameters = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.186518] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.connection_recycle_time = 3600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.186677] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.connection_trace = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.186833] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.db_inc_retry_interval = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.186991] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.db_max_retries = 20 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.187165] env[63241]: DEBUG oslo_service.service [None 
req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.db_max_retry_interval = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.187323] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.db_retry_interval = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.187480] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.max_overflow = 50 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.187636] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.max_pool_size = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.187792] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.max_retries = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.187955] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.188124] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.188280] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.pool_timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.188435] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.retry_interval = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.188589] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.slave_connection = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.188747] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] api_database.sqlite_synchronous = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.188917] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] devices.enabled_mdev_types = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.189106] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.189278] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ephemeral_storage_encryption.default_format = luks {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.189440] env[63241]: DEBUG oslo_service.service [None 
req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ephemeral_storage_encryption.enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.189600] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.189769] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.api_servers = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.189931] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.190104] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.190269] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.190424] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.connect_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.190580] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.connect_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.190737] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.debug = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.190900] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.default_trusted_certificate_ids = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.191068] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.enable_certificate_validation = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.191233] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.enable_rbd_download = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.191386] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.endpoint_override = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.191565] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.191788] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.keyfile = None 
{{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.191979] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.max_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.192171] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.min_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.192339] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.num_retries = 3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.192509] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.rbd_ceph_conf = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.192678] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.rbd_connect_timeout = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.192841] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.rbd_pool = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.193031] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.rbd_user = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.193216] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.region_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.193377] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.retriable_status_codes = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.193536] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.service_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.193703] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.service_type = image {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.193870] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.194039] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.status_code_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.194201] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.status_code_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.194359] env[63241]: DEBUG 
oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.194538] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.194716] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.verify_glance_signatures = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.194881] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] glance.version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.195061] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] guestfs.debug = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.195232] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] mks.enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.195578] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.195773] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] image_cache.manager_interval = 2400 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.195944] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] image_cache.precache_concurrency = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.196135] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] image_cache.remove_unused_base_images = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.196306] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.196475] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.196651] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] image_cache.subdirectory_name = _base {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.196828] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.api_max_retries = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.196990] env[63241]: DEBUG 
oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.api_retry_interval = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.197164] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.auth_section = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.197322] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.auth_type = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.197478] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.197631] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.197789] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.197947] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.conductor_group = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.198117] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.connect_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.198274] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.connect_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.198427] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.endpoint_override = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.198584] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.198736] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.198887] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.max_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.199050] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.min_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.199215] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.peer_list = [] {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.199368] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.region_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.199521] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.retriable_status_codes = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.199680] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.serial_console_state_timeout = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.199837] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.service_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.200016] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.service_type = baremetal {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.200177] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.shard = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.200340] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.200497] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.status_code_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.200652] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.status_code_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.200809] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.200988] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.201166] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ironic.version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.201348] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.201522] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] key_manager.fixed_key = **** {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.201713] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.201971] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.barbican_api_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.202156] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.barbican_endpoint = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.202337] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.barbican_endpoint_type = public {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.202500] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.barbican_region_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.202659] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.202818] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.202979] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.203156] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.203313] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.203473] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.number_of_retries = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.203634] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.retry_delay = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.203796] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.send_service_user_token = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.203965] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.split_loggers = False {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.204149] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.204331] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.verify_ssl = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.204499] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican.verify_ssl_path = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.204665] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican_service_user.auth_section = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.204826] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican_service_user.auth_type = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.204984] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican_service_user.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.205159] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican_service_user.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.205324] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican_service_user.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.205484] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican_service_user.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.205640] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican_service_user.keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.205800] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican_service_user.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.205957] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] barbican_service_user.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.206139] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.approle_role_id = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.206304] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.approle_secret_id = **** {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.206472] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.kv_mountpoint = secret {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.206629] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.kv_path = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.206791] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.kv_version = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.206949] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.namespace = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.207119] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.root_token_id = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.207275] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.ssl_ca_crt_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.207438] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.timeout = 60.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.207597] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.use_ssl = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.207762] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.207928] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.208099] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.208266] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.208424] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.connect_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.208582] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.connect_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.208738] env[63241]: DEBUG oslo_service.service [None 
req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.endpoint_override = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.208896] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.209064] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.209226] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.max_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.209380] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.min_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.209537] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.region_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.209695] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.retriable_status_codes = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.209852] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.service_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.210036] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.service_type = identity {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.210204] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.210362] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.status_code_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.210518] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.status_code_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.210675] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.210855] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.211024] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] keystone.version = None 
{{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.211229] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.connection_uri = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.211393] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.cpu_mode = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.211557] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.cpu_model_extra_flags = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.211722] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.cpu_models = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.211891] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.cpu_power_governor_high = performance {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.212099] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.cpu_power_governor_low = powersave {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.212273] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.cpu_power_management = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.212445] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.212609] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.device_detach_attempts = 8 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.212768] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.device_detach_timeout = 20 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.212951] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.disk_cachemodes = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.213152] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.disk_prefix = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.213329] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.enabled_perf_events = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.213495] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.file_backed_memory = 0 {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.213662] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.gid_maps = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.213821] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.hw_disk_discard = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.213979] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.hw_machine_type = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.214216] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.images_rbd_ceph_conf = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.214413] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.214582] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.214754] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.images_rbd_glance_store_name = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.214924] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.images_rbd_pool = rbd {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.215124] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.images_type = default {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.215310] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.images_volume_group = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.215476] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.inject_key = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.215639] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.inject_partition = -2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.215798] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.inject_password = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.215960] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.iscsi_iface = None {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.216137] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.iser_use_multipath = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.216300] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_bandwidth = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.216462] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.216622] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_downtime = 500 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.216782] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.216943] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.217114] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_inbound_addr = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.217278] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.217435] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_permit_post_copy = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.217597] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_scheme = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.217769] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_timeout_action = abort {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.217935] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_tunnelled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.218106] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.live_migration_uri = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.218268] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] 
libvirt.live_migration_with_native_tls = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.218427] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.max_queues = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.218587] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.218806] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.218970] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.nfs_mount_options = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.219264] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.219438] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.219600] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.num_iser_scan_tries = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.219762] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.num_memory_encrypted_guests = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.219924] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.220106] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.num_pcie_ports = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.220275] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.num_volume_scan_tries = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.220440] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.pmem_namespaces = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.220598] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.quobyte_client_cfg = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.220879] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.221067] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rbd_connect_timeout = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.221237] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.221398] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.221621] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rbd_secret_uuid = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.221798] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rbd_user = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.221994] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.222201] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.remote_filesystem_transport = ssh {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.222364] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rescue_image_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.222523] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rescue_kernel_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.222680] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rescue_ramdisk_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.222848] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.223014] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.rx_queue_size = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.223189] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.smbfs_mount_options = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.223481] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.223655] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.snapshot_compression = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.223818] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.snapshot_image_format = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.224048] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.224226] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.sparse_logical_volumes = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.224414] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.swtpm_enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.224590] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.swtpm_group = tss {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.224762] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.swtpm_user = tss {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.224934] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.sysinfo_serial = unique {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.225109] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.tb_cache_size = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.225271] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.tx_queue_size = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.225434] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.uid_maps = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.225596] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.use_virtio_for_bridges = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.225765] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.virt_type = kvm {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.225932] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.volume_clear = zero 
{{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.226109] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.volume_clear_size = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.226275] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.volume_use_multipath = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.226432] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.vzstorage_cache_path = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.226600] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.226766] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.vzstorage_mount_group = qemu {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.226929] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.vzstorage_mount_opts = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.227118] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.227466] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.227668] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.vzstorage_mount_user = stack {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.227842] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.228034] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.auth_section = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.228219] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.auth_type = password {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.228384] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.228545] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.certfile = None 
{{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.228708] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.228867] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.connect_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.229035] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.connect_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.229214] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.default_floating_pool = public {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.229374] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.endpoint_override = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.229535] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.extension_sync_interval = 600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.229696] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.http_retries = 3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.229856] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.230020] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.230186] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.max_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.230355] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.230515] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.min_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.230684] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.ovs_bridge = br-int {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.230850] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.physnets = [] {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.231036] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.region_name = RegionOne {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.231200] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.retriable_status_codes = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.231368] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.service_metadata_proxy = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.231524] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.service_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.231687] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.service_type = network {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.231846] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.232035] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.status_code_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.232209] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.status_code_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.232369] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.232546] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.232703] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] neutron.version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.232872] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] notifications.bdms_in_notifications = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.233100] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] notifications.default_level = INFO {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.233294] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] notifications.notification_format = unversioned {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.233460] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] notifications.notify_on_state_change = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.233634] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.233805] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] pci.alias = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.233970] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] pci.device_spec = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.234148] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] pci.report_in_placement = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.234318] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.auth_section = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.234486] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.auth_type = password {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.234649] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.234808] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.234963] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.235166] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.235343] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.connect_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.235503] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.connect_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.235657] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.default_domain_id = None {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.235811] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.default_domain_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.235967] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.domain_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.236143] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.domain_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.236296] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.endpoint_override = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.236454] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.236608] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.236760] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.max_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.236914] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.min_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.237127] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.password = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.237245] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.project_domain_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.237412] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.project_domain_name = Default {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.237572] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.project_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.237741] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.project_name = service {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.237906] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.region_name = RegionOne {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.238090] 
env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.retriable_status_codes = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.238253] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.service_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.238421] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.service_type = placement {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.238618] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.238789] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.status_code_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.238948] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.status_code_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.239118] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.system_scope = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.239307] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.239476] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.trust_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.239634] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.user_domain_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.239800] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.user_domain_name = Default {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.239959] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.user_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.240153] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.username = nova {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.240333] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.240492] env[63241]: DEBUG oslo_service.service [None 
req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] placement.version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.240667] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.cores = 20 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.240832] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.count_usage_from_placement = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.241013] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.241259] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.injected_file_content_bytes = 10240 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.241458] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.injected_file_path_length = 255 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.241629] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.injected_files = 5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.241795] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.instances = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.241979] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.key_pairs = 100 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.242172] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.metadata_items = 128 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.242344] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.ram = 51200 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.242506] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.recheck_quota = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.242670] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.server_group_members = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.242833] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] quota.server_groups = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.243085] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.243288] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.243454] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.image_metadata_prefilter = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.243615] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.243776] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.max_attempts = 3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.243937] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.max_placement_results = 1000 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.244117] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.244281] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.query_placement_for_image_type_support = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.244443] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.244616] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] scheduler.workers = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.244786] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.244955] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.245148] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.245314] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.245478] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.245638] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.245796] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.245977] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.246229] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.host_subset_size = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.246425] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.246588] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.246750] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.246909] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.isolated_hosts = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.247080] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.isolated_images = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.247246] env[63241]: DEBUG oslo_service.service [None 
req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.247402] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.247558] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.247715] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.pci_in_placement = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.247870] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.248035] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.248199] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.248363] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.248519] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.248675] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.248830] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.track_instance_changes = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.249009] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.249192] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] metrics.required = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.249417] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] metrics.weight_multiplier = 1.0 
{{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.249593] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.249756] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] metrics.weight_setting = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.250073] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.250255] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] serial_console.enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.250432] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] serial_console.port_range = 10000:20000 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.250601] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.250767] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.250932] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] serial_console.serialproxy_port = 6083 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.251118] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.auth_section = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.251292] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.auth_type = password {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.251449] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.251605] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.251764] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.251923] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.insecure = False {{(pid=63241) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.252118] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.252299] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.send_service_user_token = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.252463] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.252615] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] service_user.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.252800] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.agent_enabled = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.252986] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.253318] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.253523] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.253693] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.html5proxy_port = 6082 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.253851] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.image_compression = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.254041] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.jpeg_compression = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.254186] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.playback_compression = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.254344] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.require_secure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.254505] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.server_listen = 127.0.0.1 {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.254671] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.254841] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.streaming_mode = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.254981] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] spice.zlib_compression = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.255159] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] upgrade_levels.baseapi = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.255332] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] upgrade_levels.compute = auto {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.255497] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] upgrade_levels.conductor = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.255646] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] upgrade_levels.scheduler = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.255807] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vendordata_dynamic_auth.auth_section = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.255972] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vendordata_dynamic_auth.auth_type = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.256157] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vendordata_dynamic_auth.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.256287] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vendordata_dynamic_auth.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.256445] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.256605] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vendordata_dynamic_auth.insecure = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.256772] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vendordata_dynamic_auth.keyfile = None {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.256914] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.257078] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vendordata_dynamic_auth.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.257254] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.api_retry_count = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.257403] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.ca_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.257583] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.cache_prefix = devstack-image-cache {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.257731] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.cluster_name = testcl1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.257888] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.connection_pool_size = 10 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.258053] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.console_delay_seconds = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.258226] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.datastore_regex = ^datastore.* {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.258449] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.258595] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.host_password = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.258760] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.host_port = 443 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.258930] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.host_username = administrator@vsphere.local {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.259117] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.insecure = True {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.259287] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.integration_bridge = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.259466] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.maximum_objects = 100 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.259629] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.pbm_default_policy = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.259791] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.pbm_enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.259948] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.pbm_wsdl_location = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.260134] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.260338] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.serial_port_proxy_uri = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.260546] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.serial_port_service_uri = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.260711] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.task_poll_interval = 0.5 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.260885] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.use_linked_clone = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.261072] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.vnc_keymap = en-us {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.261255] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.vnc_port = 5900 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.261405] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vmware.vnc_port_total = 10000 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.261588] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.auth_schemes = ['none'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.261761] 
env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.enabled = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.262087] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.262281] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.262450] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.novncproxy_port = 6080 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.262633] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.server_listen = 127.0.0.1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.262807] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.262998] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.vencrypt_ca_certs = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.263194] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.vencrypt_client_cert = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.263339] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vnc.vencrypt_client_key = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.263517] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.263677] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.disable_deep_image_inspection = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.263840] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.264010] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.264179] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.264336] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.disable_rootwrap = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.264493] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.enable_numa_live_migration = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.264652] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.264809] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.264967] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.265139] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.libvirt_disable_apic = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.265299] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.265457] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.265614] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.265770] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.265928] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.266100] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.266261] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.266416] 
env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.266571] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.266730] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.266913] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.267092] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.client_socket_timeout = 900 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.267263] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.default_pool_size = 1000 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.267429] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.keep_alive = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.267593] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.max_header_line = 16384 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.267753] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.secure_proxy_ssl_header = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.267912] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.ssl_ca_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.268081] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.ssl_cert_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.268251] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.ssl_key_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.268414] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.tcp_keepidle = 600 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.268588] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.268751] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] zvm.ca_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.268905] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] zvm.cloud_connector_url = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.269209] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.269389] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] zvm.reachable_timeout = 300 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.269571] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.enforce_new_defaults = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.269937] env[63241]: WARNING oslo_config.cfg [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
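The option dump above (and continuing below) is produced by oslo.config's ConfigOpts.log_opt_values(), which nova-compute calls at startup to record every registered option as "group.option = value", masking options registered as secret with "****" (e.g. vmware.host_password, profiler.hmac_keys); the WARNING just above is oslo.config flagging that the deployment explicitly sets the deprecated [oslo_policy]/enforce_scope option. The following is a minimal standalone Python sketch of that mechanism, not Nova's actual option definitions: it re-registers only a handful of the options whose values appear in this dump using the generic oslo.config API, so the option subset and help text are illustrative assumptions.

import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.ConfigOpts()

# Re-register a few of the options seen in the dump above. In Nova these are
# defined in its own conf modules and in the libraries it uses; this subset is
# only for illustration.
CONF.register_group(cfg.OptGroup('oslo_policy'))
CONF.register_opts(
    [
        cfg.BoolOpt('enforce_new_defaults', default=True),
        # deprecated_for_removal is what causes oslo.config to log the
        # "Deprecated: Option ... is deprecated for removal" warning when a
        # deployment sets the option explicitly in its config files.
        cfg.BoolOpt('enforce_scope', default=True,
                    deprecated_for_removal=True,
                    deprecated_reason='Scope checks are always enforced.'),
        cfg.StrOpt('policy_file', default='policy.yaml'),
    ],
    group='oslo_policy',
)
CONF.register_group(cfg.OptGroup('vmware'))
CONF.register_opts(
    [
        cfg.IntOpt('api_retry_count', default=10),
        # secret=True is why the dump prints "****" instead of the value.
        cfg.StrOpt('host_password', secret=True),
    ],
    group='vmware',
)

CONF([])                                   # parse; no config files needed here
CONF.log_opt_values(LOG, logging.DEBUG)    # emits "oslo_policy.enforce_scope = True",
                                           # "vmware.host_password = ****", etc.

Running this prints one DEBUG line per registered option in the same "group.option = value" form as the dump in this log.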
[ 479.270140] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.enforce_scope = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.270317] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.policy_default_rule = default {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.270495] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.270665] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.policy_file = policy.yaml {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.270835] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.270995] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.271169] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.271328] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.271490] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.271656] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_policy.remote_timeout = 60.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.271824] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.272031] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.272226] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.connection_string = messaging:// {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.272431] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.enabled = False {{(pid=63241) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.272609] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.es_doc_type = notification {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.272775] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.es_scroll_size = 10000 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.272961] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.es_scroll_time = 2m {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.273168] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.filter_error_trace = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.273344] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.hmac_keys = **** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.273510] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.sentinel_service_name = mymaster {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.273675] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.socket_timeout = 0.1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.273837] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.trace_requests = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.273995] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler.trace_sqlalchemy = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.274193] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler_jaeger.process_tags = {} {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.274353] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler_jaeger.service_name_prefix = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.274511] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] profiler_otlp.service_name_prefix = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.274674] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] remote_debug.host = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.274829] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] remote_debug.port = None {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.275010] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.275183] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.275343] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.275500] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.275656] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.275810] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.275965] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.276136] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.276293] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.276457] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.276611] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.276775] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.276935] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.277115] 
env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.277282] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.277446] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.277608] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.277777] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.277938] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.278109] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.278273] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.278434] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.278594] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.278757] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.278915] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.279086] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.279249] env[63241]: DEBUG oslo_service.service [None 
req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.279407] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.279570] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.279730] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.ssl = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.279900] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.280088] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.280255] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.280423] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.280589] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.ssl_version = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.280747] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.280929] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.281107] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_notifications.retry = -1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.281291] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.281462] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_messaging_notifications.transport_url = **** {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.281635] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.auth_section = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.281793] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.auth_type = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.281977] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.cafile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.282149] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.certfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.282315] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.collect_timing = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.282470] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.connect_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.282628] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.connect_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.282782] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.endpoint_id = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.282979] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.endpoint_interface = publicURL {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.283182] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.endpoint_override = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.283344] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.endpoint_region_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.283503] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.endpoint_service_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.283660] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.endpoint_service_type = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.283824] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.insecure = False {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.283979] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.keyfile = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.284151] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.max_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.284306] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.min_version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.284459] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.region_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.284612] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.retriable_status_codes = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.284764] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.service_name = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.284918] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.service_type = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.285092] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.split_loggers = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.285254] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.status_code_retries = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.285412] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.status_code_retry_delay = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.285566] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.timeout = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.285719] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.valid_interfaces = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.285874] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_limit.version = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.286047] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_reports.file_event_handler = None {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.286213] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.286370] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] oslo_reports.log_dir = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.286536] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.286694] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.286849] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.287023] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.287194] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.287349] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.287516] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.287672] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_ovs_privileged.group = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.287826] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.287989] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.288163] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.288319] env[63241]: DEBUG 
oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] vif_plug_ovs_privileged.user = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.288485] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_linux_bridge.flat_interface = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.288664] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.288836] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.289013] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.289196] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.289363] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.289528] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.289690] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.289866] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.290049] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_ovs.isolate_vif = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.290223] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.290390] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.290560] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.290730] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_ovs.ovsdb_interface = native {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.290892] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] os_vif_ovs.per_port_bridge = False {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.291080] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] privsep_osbrick.capabilities = [21] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.291242] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] privsep_osbrick.group = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.291397] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] privsep_osbrick.helper_command = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.291558] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.291718] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.291874] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] privsep_osbrick.user = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.292087] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.292247] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] nova_sys_admin.group = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.292404] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] nova_sys_admin.helper_command = None {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.292566] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.292723] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.292877] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] nova_sys_admin.user = None {{(pid=63241) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 479.293035] env[63241]: DEBUG oslo_service.service [None req-1ae4c560-47ce-4ab6-9781-53c7485440ed None None] ******************************************************************************** {{(pid=63241) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 479.293516] env[63241]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 479.796384] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Getting list of instances from cluster (obj){ [ 479.796384] env[63241]: value = "domain-c8" [ 479.796384] env[63241]: _type = "ClusterComputeResource" [ 479.796384] env[63241]: } {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 479.797530] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f2658e-d343-4e21-b471-add1451c2bf7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 479.806686] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Got total of 0 instances {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 479.807206] env[63241]: WARNING nova.virt.vmwareapi.driver [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 479.807666] env[63241]: INFO nova.virt.node [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Generated node identity 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b [ 479.807898] env[63241]: INFO nova.virt.node [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Wrote node identity 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b to /opt/stack/data/n-cpu-1/compute_id [ 480.310976] env[63241]: WARNING nova.compute.manager [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Compute nodes ['9a5e30eb-ceae-4224-aa66-dcbfa98ce24b'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 481.321018] env[63241]: INFO nova.compute.manager [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 482.327176] env[63241]: WARNING nova.compute.manager [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
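[editor's note] The DEBUG lines above are the tail of the oslo.config option dump that nova-compute emits at startup via log_opt_values: each registered group (oslo_limit, oslo_reports, the os_vif and privsep groups, nova_sys_admin) is printed with its effective value, ending with the row of asterisks just before "Starting compute node". The sketch below illustrates that registration-and-dump pattern in isolation; it is not Nova or os_vif source. The group name, option names and defaults are simply mirrored from the log lines above, and everything else is an illustrative assumption.

# Hedged sketch (not Nova/os_vif code): registering an option group like
# [os_vif_ovs] with oslo.config and dumping effective values one per line,
# the same mechanism behind the log_opt_values output above.
import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)

# Group/option names and defaults mirrored from the log; real definitions live in os_vif.
ovs_group = cfg.OptGroup(name='os_vif_ovs', title='os-vif OVS plugin options')
ovs_opts = [
    cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640',
               help='OVSDB connection string used by the ovs VIF plugin.'),
    cfg.IntOpt('ovs_vsctl_timeout', default=120,
               help='Timeout, in seconds, for OVSDB operations.'),
    cfg.BoolOpt('per_port_bridge', default=False,
                help='Whether to create a separate bridge per port.'),
]

CONF = cfg.CONF
CONF.register_group(ovs_group)
CONF.register_opts(ovs_opts, group=ovs_group)

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    CONF([], project='demo')                  # parse an (empty) command line
    # Emits one DEBUG line per option, e.g. "os_vif_ovs.ovs_vsctl_timeout = 120",
    # bracketed by lines of asterisks -- the same shape as the dump above.
    CONF.log_opt_values(LOG, logging.DEBUG)

Running such a script prints every option with its effective (default or overridden) value, which is what makes these startup dumps useful for checking whether a nova.conf override actually took effect.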
[ 482.327487] env[63241]: DEBUG oslo_concurrency.lockutils [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 482.327733] env[63241]: DEBUG oslo_concurrency.lockutils [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 482.327884] env[63241]: DEBUG oslo_concurrency.lockutils [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 482.328067] env[63241]: DEBUG nova.compute.resource_tracker [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 482.328996] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4999aaaf-96cc-4143-9e5a-853dcbb66b38 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 482.339260] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8495dc0c-579b-4780-be6e-97cd9b317981 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 482.354039] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfcfa22b-c612-4c93-918f-755d9d1607f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 482.360030] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440cfbb5-4262-46cd-824e-908422692ce0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 482.389185] env[63241]: DEBUG nova.compute.resource_tracker [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181386MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 482.389392] env[63241]: DEBUG oslo_concurrency.lockutils [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 482.389567] env[63241]: DEBUG oslo_concurrency.lockutils [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 482.895155] env[63241]: WARNING 
nova.compute.resource_tracker [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] No compute node record for cpu-1:9a5e30eb-ceae-4224-aa66-dcbfa98ce24b: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b could not be found. [ 483.398968] env[63241]: INFO nova.compute.resource_tracker [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b [ 484.915083] env[63241]: DEBUG nova.compute.resource_tracker [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 484.915511] env[63241]: DEBUG nova.compute.resource_tracker [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 485.064882] env[63241]: INFO nova.scheduler.client.report [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] [req-29c5d5c5-ba37-4398-ae2c-f9d145e50c5b] Created resource provider record via placement API for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 485.087505] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ecd713-c76e-4298-bc37-ee6793ad4a30 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 485.096070] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3df9db-7d25-48c6-9bb5-5cfd0a1f6af5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 485.125083] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023db78f-26c6-4bbd-b918-275a73448828 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 485.132180] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078fe8fa-e1d2-41bf-a438-c759f4f9c2c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 485.145092] env[63241]: DEBUG nova.compute.provider_tree [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 485.678375] env[63241]: DEBUG nova.scheduler.client.report [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 485.678612] env[63241]: DEBUG nova.compute.provider_tree [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 0 to 1 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 485.678755] env[63241]: DEBUG nova.compute.provider_tree [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 485.728444] env[63241]: DEBUG nova.compute.provider_tree [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 1 to 2 during operation: update_traits {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 486.235120] env[63241]: DEBUG nova.compute.resource_tracker [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 486.235467] env[63241]: DEBUG oslo_concurrency.lockutils [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.846s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 486.235522] env[63241]: DEBUG nova.service [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Creating RPC server for service compute {{(pid=63241) start /opt/stack/nova/nova/service.py:186}} [ 486.251118] env[63241]: DEBUG nova.service [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] Join ServiceGroup membership for this service compute {{(pid=63241) start /opt/stack/nova/nova/service.py:203}} [ 486.251313] env[63241]: DEBUG nova.servicegroup.drivers.db [None req-634dd0aa-798d-4c64-ba8b-08019c00b7fe None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63241) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 506.254933] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 506.758433] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Getting list of instances from cluster (obj){ [ 506.758433] env[63241]: value = 
"domain-c8" [ 506.758433] env[63241]: _type = "ClusterComputeResource" [ 506.758433] env[63241]: } {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 506.759666] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dadd3d-9418-47a1-94a6-54901219f40e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.768480] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Got total of 0 instances {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 506.768701] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 506.768987] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Getting list of instances from cluster (obj){ [ 506.768987] env[63241]: value = "domain-c8" [ 506.768987] env[63241]: _type = "ClusterComputeResource" [ 506.768987] env[63241]: } {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 506.769810] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604dca6c-9da0-4143-9208-db58e6d3ac9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.776665] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Got total of 0 instances {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 534.462460] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 534.462884] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 534.463015] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 534.463139] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 534.965827] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 534.966074] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 534.966306] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 534.966519] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 534.966712] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 534.966894] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 534.967105] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 534.967274] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 534.967401] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 535.471175] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.471525] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.471627] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.471770] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 535.472669] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cb17a0-0c5e-4651-b1e4-0c106015a6b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.481320] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f416a4b4-af0b-42ca-8ef4-ae6605c6dd74 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.495020] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c05e34-f845-4b5b-9b4c-389919fbebbd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.501094] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bbf676-6c41-47db-ad42-9625f3f1146a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.529626] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 535.529750] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.529929] 
env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.547894] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 536.548224] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 536.563464] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5010f4f1-7317-4041-afa1-b033d2fd66a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.571275] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92e6ad6-60c1-4923-b85c-a183856b18ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.602653] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01780ef4-1022-4cdd-8fe3-8a90988e0ff0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.609890] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421eec82-ccd9-4379-ba16-280cb1afbd8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.623396] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.126263] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 537.631858] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 537.632243] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.102s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.616691] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 597.617085] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.121737] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.122033] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 598.122138] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 598.627422] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 598.627835] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.627835] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.628124] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.628319] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.628485] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.628633] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 598.628763] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 598.628902] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 599.132187] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.132432] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.132587] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.132743] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 599.133702] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bff2a1-d88d-4648-ac7d-55b8b0d1dcda {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.141968] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416be4a5-f8bf-49c3-b569-1c2d54bcdf83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.155524] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4e2341-f43c-46b5-bacd-968ea3705770 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.162247] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cd076d-9556-4a2e-9e37-e43f06925980 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.190586] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181379MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 599.190725] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.190899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.209096] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 600.209473] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 600.222176] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1644928e-0bd1-433d-bfc3-95af8d159c7c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.230033] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43c7deb-5dab-4492-8764-6a938823092d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.258578] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b17fe8-dff1-4dd9-b43e-bf45101a8db8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.265423] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3aa2419-b337-4b39-bfb8-ed18407c54b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.277962] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.781571] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.783110] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 600.783309] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.785347] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 660.785781] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 660.785781] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 660.785901] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 661.288895] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 661.292631] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 661.292631] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 661.292631] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 661.292631] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 661.292631] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 661.292631] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 661.292845] env[63241]: DEBUG 
nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 661.292845] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 661.793540] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.793908] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.793946] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.794106] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 661.795067] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9005d291-2ce7-433f-98e3-b90f691c5df5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.803244] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fafaf37-94c8-46bd-918b-db109bc3487c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.816892] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5734c4e8-6277-4c72-9c31-347c1861b721 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.823012] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96305e68-4991-4f56-9aae-f3c99d22cb2c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.852163] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181382MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 661.852318] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.852502] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.872491] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 662.872753] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 662.885735] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96419a0-99ea-47ff-b252-c6d8a054a5ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.893103] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2913ca4e-934e-4524-87e4-b6f28c5c5539 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.922073] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8c4bb8-441f-448f-999a-edefe1eb2ab3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.928886] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df59507-8867-4e0b-950a-00b7505ff4d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.941811] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.445211] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 663.446468] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
663.446658] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.107965] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.108419] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.613058] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.613293] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 722.613332] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 723.118511] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 723.118511] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 723.118511] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 723.118511] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 723.118511] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 723.118511] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 723.118993] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 723.118993] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 723.118993] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 723.624026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.624026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.624026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.624026] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 723.624026] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f43e3aa-219e-4abf-9f87-605bd7033aee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.631890] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b847318a-00e9-4ce3-aeeb-b04fde9accb5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.646671] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212b95f1-3863-4cd3-a271-9d52844ec885 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.652927] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c53a9dd-be7d-4e77-80d8-164c14d8c3e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.682720] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181368MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 723.683065] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.683380] 
env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.702051] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 724.702051] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 724.714790] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9593ddc4-d2d4-4157-8b35-6b67f24b7d0d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.722516] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6312cf6-4827-4a74-b8cb-32b762d262aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.751986] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d4f3a5-268b-443b-956a-c962a19f815f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.758747] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2ae13f-488f-47cf-bc93-94c68dfd663a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.771078] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.274035] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 725.275192] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 725.275375] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.453874] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.454364] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 774.454364] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.454435] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 774.961847] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] There are 0 instances to clean {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 774.962104] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.962261] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances with incomplete migration {{(pid=63241) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 775.465039] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.961556] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 778.451591] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 779.452411] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 779.452774] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 779.452774] env[63241]: DEBUG nova.compute.manager [None 
req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 779.957070] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 779.957070] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 779.957070] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 779.957070] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 779.957422] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 780.451843] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 780.955124] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.955639] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.955639] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.955800] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 780.956666] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a49f07a-28e3-493b-8206-875921d58059 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
780.964975] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70deea65-2c88-44c1-b5e6-4db9e2ac3a69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.978663] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fb5c1b-231c-44c6-a58d-4ccde49560aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.984761] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01692975-21aa-42e7-a722-5f2fddd11d2e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.012596] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181369MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 781.012732] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.012889] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.030273] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 782.030549] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 782.043751] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4dac1ed-723b-4a06-8b07-e0c64b163c2b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.051383] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5ea266-f83c-4e17-85e8-f54119893b1a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.079905] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f015cbc9-ed0b-41fd-a20b-37f35b09a907 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.086808] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dffb877-c1dd-4e4e-8ba1-df0c22c188f0 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.099405] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.602134] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 782.603445] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 782.603624] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.600533] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.600910] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.600961] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 840.452597] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 840.452966] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 840.452966] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 840.955967] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 840.956242] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 841.451620] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 841.451849] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 841.452028] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 841.452173] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.451554] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.955884] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.956293] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.956593] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.956851] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 842.958311] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556825be-5ce1-4a14-b564-67d77f257f60 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.970408] env[63241]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a95d55-dded-4bb1-8d48-477e9d3ac5dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.992920] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa8f131-aaf0-4e0c-aa7b-b627e00b39e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.002129] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f32399-38ff-4d52-925b-f541f7b670b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.050801] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181361MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 843.051236] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.051697] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.086031] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 844.086031] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 844.102853] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 844.117302] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 844.117475] env[63241]: DEBUG nova.compute.provider_tree [None 
req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 844.128676] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 844.143642] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 844.155263] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70dcfd10-b05e-46da-8db8-627f525ce05d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.162526] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674b59c6-4e50-4527-bba7-a84809907c2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.192537] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe539da8-933d-4dfb-9b2b-726f260aaeaa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.199045] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e096d2be-8531-44ea-8f19-1cb6adb1c5bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.211296] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.714125] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.715446] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 844.715648] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.664s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.712022] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.953125] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.451913] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.452131] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 900.452616] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 901.451748] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 901.451930] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 901.452067] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 901.954886] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 902.451665] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 902.451912] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 902.452055] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 903.452141] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 904.452713] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 904.958018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.958018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.958018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.958018] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 904.958018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8bf8df-a3c4-4240-9e63-4fd027ee1cc8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.967385] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda1a5fe-5bc9-4af0-a188-f42f2684dbc4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.982035] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f89398-e915-451c-9254-d6813098b256 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.990145] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c7fc4b-3e97-4839-8922-bb25bcec83fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.018766] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181368MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 905.020473] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.020473] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.037168] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 906.037429] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 906.050173] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b05b13f-41d7-4de7-8844-58a11c4edf2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.058591] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f9be1e-6e0a-4466-bc44-68f1d6be826e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.087824] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0459ef8-7b5c-4f2a-a250-a0ba76d7851f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.094835] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b473745-b14c-44d0-8ddb-d992e1fd11c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.107604] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.611318] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 906.612586] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 906.612775] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.609489] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.452269] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.452504] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 960.453193] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 961.452463] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 961.452658] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 961.452784] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 961.955648] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 962.452643] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.452643] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 963.452959] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 963.452959] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 966.451321] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 966.954681] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.954920] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.955112] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.955274] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 966.956188] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1a66a2-19cd-4094-9405-348389a7da71 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.964828] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f8bfab-cbfe-4842-9e5c-d33fe6aa9795 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.980134] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646d6e96-c0e6-4790-983a-cb875cc95305 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.986359] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab85f2c4-78ef-41b4-8079-7a646ceec7ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.014245] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181365MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 967.014400] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.014557] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.031866] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 968.032159] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 968.046007] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d610d8-2f60-4784-9149-1bbd09aa8d32 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.053774] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba423095-b92d-4b9e-934b-71d20f3332f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.083610] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93340384-2357-4889-96bd-0859c2cd2bc6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.090541] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d44b4f-e9c2-40b8-8bf5-5e8da2aab6c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.103558] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.607949] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.607949] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 968.607949] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.605057] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.952256] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1018.451735] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1018.451735] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1021.451791] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1023.453647] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1023.453975] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1023.454105] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1023.956943] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1023.957198] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1024.451738] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1024.452061] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1025.452122] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.451454] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.955262] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.955533] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.955704] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.955861] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1027.956803] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94197459-b19d-4296-a0d5-00bbaf898350 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.965266] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ee0dd1-be94-49e9-8c03-2be2e3d7af4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.979481] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737d4908-4013-4ea1-a8aa-d02e47b16577 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.985613] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f95684f-01e6-4ab8-9b05-638f1f5cf346 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.014553] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181345MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1028.014713] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.014876] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.034690] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1029.034690] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1029.045697] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07acf3ba-9aab-4503-a197-75ecd69e9584 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.053146] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ff62a0-5ed5-494e-91c4-9b206298869e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.081798] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38dee4d1-73a1-4e55-a343-7bb1cf10fe46 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.088551] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89828b6f-aff5-4915-ae6d-ee78d033c087 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.102007] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.605332] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1029.606649] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1029.606824] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.454132] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.454640] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.454640] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances {{(pid=63241) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11229}} [ 1079.958527] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] There are 0 instances to clean {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1080.956211] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.956643] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1083.453611] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.453931] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1083.453931] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1083.956802] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1083.957042] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.957603] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.957603] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.957603] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances with incomplete migration {{(pid=63241) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1084.955137] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1085.452279] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63241) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.953873] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.452247] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.447435] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.951819] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.456951] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.457348] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.457418] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.457558] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1090.458467] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29bf5fbb-8160-4eea-8dbf-c3c46dc3832f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.466698] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfe746f-5466-477e-b9f2-031017ead02a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.480934] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a057f6-ad16-494c-8513-8292b46a9c1b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.487200] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cbeae7-24bd-4f6d-9ac3-ebd4d1dd0d21 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.514801] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181363MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1090.514966] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.515146] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.533862] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1091.534179] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1091.546817] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df68420-52c9-4cc1-9961-04440c2dfc4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.554233] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdd8d6b-7ff3-4ed3-b024-528fcbe05c7a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.583506] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f882e7-0b0d-4877-9139-331f8ea9a32e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.590035] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75b8449-3ac0-4e3b-bbc6-4cde83c36398 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.602409] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.105719] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1092.106993] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1092.107196] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.413812] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.917155] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Getting list of instances from cluster (obj){ [ 1108.917155] env[63241]: value = "domain-c8" [ 1108.917155] env[63241]: _type = "ClusterComputeResource" [ 1108.917155] env[63241]: } {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1108.918225] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9a4968-1202-4ba3-9595-2777c3e38b54 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.926969] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Got total of 0 instances {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1137.961914] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.452284] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.452693] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
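Annotation: most of the traffic in this idle capture is the periodic-task machinery — oslo.service runs the ComputeManager's due tasks (_heal_instance_info_cache, update_available_resource, _sync_power_states, ...) and each one logs a "Running periodic task" line. The sketch below is a deliberately simplified stand-in for that dispatch pattern, not the oslo.service implementation, and the spacing values in it are invented for illustration.

# Simplified stand-in for spacing-based periodic dispatch (NOT oslo.service).
import time

class PeriodicRunner:
    def __init__(self):
        self._tasks = []  # each item: [name, spacing_seconds, callable, last_run]

    def add(self, name, spacing, fn):
        self._tasks.append([name, spacing, fn, 0.0])

    def run_due_tasks(self):
        now = time.monotonic()
        for task in self._tasks:
            name, spacing, fn, last_run = task
            if now - last_run >= spacing:
                print(f"Running periodic task {name}")
                fn()
                task[3] = now

runner = PeriodicRunner()
# Spacings are illustrative, not the values configured for this node.
runner.add("ComputeManager._heal_instance_info_cache", 60, lambda: None)
runner.add("ComputeManager.update_available_resource", 60, lambda: None)

for _ in range(3):          # short demo loop; the real service ticks forever
    runner.run_due_tasks()
    time.sleep(1)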
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1143.454152] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.454529] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1143.454529] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1143.957691] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1144.451543] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.452279] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.452632] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1147.452424] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.453018] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.452027] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.954892] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.955186] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.955314] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.955473] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1150.956416] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ba5959-19ee-49a7-a4bc-59b1cb955d29 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.965590] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f6056d-2c3a-4022-9f68-8ee8dda17cd1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.980102] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcf7624-f654-4130-9d92-1f674937e677 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.986255] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dfc14e-7501-4b9b-8bfe-c54a89dc8223 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.014873] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1151.015059] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.015503] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.132970] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1152.133255] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1152.148336] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1152.158992] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1152.159181] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1152.167929] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1152.182094] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1152.192498] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a48f78-23da-4dac-bbf9-7c4dfc072d48 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.199441] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46879f6-44ef-4045-9571-079a03b45698 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.229054] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286647ce-66ac-4c23-8e41-46e7f7591759 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.235903] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210d9806-e85c-40fa-ab47-3437d655ee08 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1152.248497] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.752061] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1152.753428] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1152.753571] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.738s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.750120] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.451738] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.452110] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1203.452616] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.452917] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1203.452917] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1203.956519] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. 
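Annotation: the "Refreshing inventories/aggregates/traits" entries followed by "Inventory has not changed" show the report client comparing what the resource tracker just computed against its cached view of Placement and skipping the update when nothing differs. A minimal illustration of that comparison (not the Nova report client; the trait set is the one listed in the log above):

# Illustrative comparison only: update Placement only when something differs.
def needs_update(local_inventory: dict, cached_inventory: dict,
                 local_traits: set, cached_traits: set) -> bool:
    return local_inventory != cached_inventory or local_traits != cached_traits

cached_traits = {
    "COMPUTE_SAME_HOST_COLD_MIGRATE", "COMPUTE_IMAGE_TYPE_ISO", "COMPUTE_NODE",
    "HW_ARCH_X86_64", "COMPUTE_IMAGE_TYPE_VMDK", "COMPUTE_NET_ATTACH_INTERFACE",
}
local_traits = set(cached_traits)                  # nothing changed on this host
local_inventory = {"VCPU": {"total": 48, "reserved": 0}}

if needs_update(local_inventory, dict(local_inventory), local_traits, cached_traits):
    print("would PUT updated inventory/traits to Placement")
else:
    print("Inventory has not changed for provider; skipping update")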
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1204.452275] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.452599] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.451542] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.452950] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.452950] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.448763] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.452104] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.955136] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.955404] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.955557] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1211.955715] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 
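Annotation: the paired "Acquiring lock ... / acquired ... waited ... / released ... held ..." lines around "compute_resources" come from the oslo.concurrency lock wrapper used by the resource tracker. The following is a plain-threading stand-in, not the oslo code, that produces the same shape of telemetry; the sleep stands in for the audit work done while the lock is held.

# Plain-threading stand-in for the waited/held telemetry seen above.
import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name, owner):
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released by "{owner}" :: held {time.monotonic() - t1:.3f}s')

with timed_lock("compute_resources", "ResourceTracker._update_available_resource"):
    time.sleep(0.1)   # stands in for the resource audit done while holding the lock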
1211.956957] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b4a564-ff8e-4bd3-9acb-c98a2ff50b3c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.965591] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3cabe7-02be-4b36-8616-3d4b1edcff84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.979443] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efcf9c4e-3ed7-404e-a11b-0ec9bb2a35a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.985703] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f061f570-5302-4577-bf56-7920a5266569 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.014915] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181361MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1212.015071] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.015260] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.035380] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1213.035380] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1213.053640] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f8ab40-8492-495d-82fe-3b08847d7ed0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.061372] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6d489d-9a1d-497d-a9e8-019075492c07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.090477] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccab478-a7f0-4d58-8942-95c5cbefd616 {{(pid=63241) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.097696] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df6e284-78b3-43e7-8504-f64e4459354b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.111392] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.614761] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1213.615988] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1213.616187] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.601s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.613344] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.451605] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.451990] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.451990] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
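Annotation: because this capture wraps entries across physical lines, it can be convenient to re-split and summarize it mechanically. The snippet below is an illustrative parser for the entry format visible throughout ("[ uptime] env[pid]: LEVEL module [context] message {{(pid) function file:line}}"); the trailing location block is treated as optional since wrapped entries may lose it.

# Illustrative parser for the log format used in this capture (stdlib only).
import re

ENTRY = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+env\[(?P<env>\d+)\]:\s+"
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+"
    r"(?P<module>\S+)\s+"
    r"\[(?P<ctx>[^\]]*)\]\s+"
    r"(?P<msg>.*?)"
    r"(?:\s+\{\{\(pid=\d+\)\s+(?P<func>\S+)\s+(?P<loc>\S+)\}\})?$"
)

sample = ('[ 1203.452917] env[63241]: DEBUG nova.compute.manager '
          '[None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] '
          'Starting heal instance info cache '
          '{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}}')

m = ENTRY.match(sample)
if m:
    print(m.group("ts"), m.group("module"), "->", m.group("msg"))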
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1265.453120] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.453500] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1265.453500] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1265.956742] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1265.957042] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.452415] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.451968] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.452340] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.452036] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.955740] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.955980] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.962253] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.962253] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1272.962253] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9512c9-e9e1-4ff8-bc4a-95dd11ef71d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.971552] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd7fbc0-d8ac-4317-a1d1-33698dcf6895 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.985045] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4905eb1b-1fef-4299-9a76-ae483f26c1b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.990937] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68d1c9c-4f7f-4ce6-8463-446ece47430a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.019612] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181370MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1273.019835] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.020113] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.037745] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1274.037992] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1274.050435] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b322278-055f-485f-b91f-8f86c470ecc2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.058018] env[63241]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472291c2-1990-4ee2-b7a3-b531f7380a5e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.086667] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94de0fb-c553-4f06-b45d-c46d9fa3fc5c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.093378] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b797a703-99e1-4b2b-98dd-c1a648f46b9a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.105985] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.608778] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1274.610027] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1274.610224] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1318.504358] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquiring lock "69c73342-258a-4b00-ba1b-ffdd5f247890" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.504856] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "69c73342-258a-4b00-ba1b-ffdd5f247890" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.008738] env[63241]: DEBUG nova.compute.manager [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 
tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1319.552481] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.552831] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.557379] env[63241]: INFO nova.compute.claims [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1320.616989] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66562f0f-e56d-4f0c-8257-155ba1f757bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.627763] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e81265-1497-45f6-a421-cf6d77a31bff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.660619] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f2111d-d11a-4bf1-b8dc-af1888f659f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.668955] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d888f139-df32-4430-9ce0-4da1cf1bae7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.684088] env[63241]: DEBUG nova.compute.provider_tree [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1321.188250] env[63241]: DEBUG nova.scheduler.client.report [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1321.694456] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.142s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.695098] env[63241]: DEBUG nova.compute.manager [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1322.201173] env[63241]: DEBUG nova.compute.utils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1322.202957] env[63241]: DEBUG nova.compute.manager [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Not allocating networking since 'none' was specified. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1322.605813] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.706978] env[63241]: DEBUG nova.compute.manager [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1323.291403] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "b4182e53-50db-4256-b376-b00100778935" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.292269] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "b4182e53-50db-4256-b376-b00100778935" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.441920] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "bbb94f08-7df2-457e-bc5b-d0008839cf20" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.442047] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "bbb94f08-7df2-457e-bc5b-d0008839cf20" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.717589] env[63241]: DEBUG nova.compute.manager [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1323.794556] env[63241]: DEBUG nova.compute.manager [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1323.945820] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1323.945820] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1323.945820] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1323.946059] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1323.946059] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1323.946059] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1323.946446] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1323.946735] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1323.947143] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1323.947587] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1323.947970] env[63241]: DEBUG nova.virt.hardware [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1323.950016] env[63241]: DEBUG nova.compute.manager [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1323.952598] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3029fa9-898a-4373-8f0c-564e7d42addb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.966018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10ff6cb-9e57-41d1-aa7a-7abade9c4acb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.986693] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90cf658-d746-40ac-b595-0a81cc1fcfc3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.010786] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1324.023023] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1324.023023] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d15517d3-5ec0-4b60-afe5-c06634a5948c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.039042] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Created folder: OpenStack in parent group-v4. 
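
The folder entries above ("Creating folder: OpenStack. Parent ref: group-v4.") and the CreateVM_Task polling that follows all go through the same oslo.vmware invoke-and-poll pattern: issue a SOAP method on the vim service, and if the method returns a task, poll it until it completes (the "progress is 5%" / "completed successfully" lines come from oslo_vmware's _poll_task). A minimal sketch of that pattern, assuming oslo.vmware's VMwareAPISession and vim_util helpers; the vCenter host, credentials, and the VM config/resource-pool placeholders are illustrative and not taken from this log, and keyword spellings may differ slightly between oslo.vmware releases:

```python
# Sketch of the invoke-and-poll pattern visible in the log: a SOAP call such as
# Folder.CreateFolder or Folder.CreateVM_Task is issued through the session, and
# task-returning calls are polled via wait_for_task().  Placeholder host and
# credentials; requires a reachable vCenter to actually run.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',          # placeholders
    api_retry_count=3, task_poll_interval=0.5)

# Resolve a parent folder moref (the log's "group-v4") into a managed object ref.
parent = vim_util.get_moref('group-v4', 'Folder')

# CreateFolder returns the new folder directly, no task involved.
folder = session.invoke_api(session.vim, 'CreateFolder', parent, name='OpenStack')

# CreateVM_Task returns a task instead; wait_for_task() blocks and logs progress
# exactly like the "CreateVM_Task} progress is 5%" lines above.
# vm_config_spec and resource_pool are hypothetical placeholders here.
# task = session.invoke_api(session.vim, 'CreateVM_Task', folder,
#                           config=vm_config_spec, pool=resource_pool)
# task_info = session.wait_for_task(task)
```
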
[ 1324.039042] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Creating folder: Project (79da84454287473ea95c2903f5764d85). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1324.039042] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0c1b1a8-3924-438c-bcef-d9719a7dd2f6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.047680] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Created folder: Project (79da84454287473ea95c2903f5764d85) in parent group-v376927. [ 1324.047964] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Creating folder: Instances. Parent ref: group-v376928. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1324.048954] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26ea4644-b104-4e53-bfea-87154c9609a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.057153] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Created folder: Instances in parent group-v376928. [ 1324.057432] env[63241]: DEBUG oslo.service.loopingcall [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1324.057631] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1324.057822] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56fdbfd3-c271-445e-90c2-4e5ea1cb328c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.088192] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1324.088192] env[63241]: value = "task-1819761" [ 1324.088192] env[63241]: _type = "Task" [ 1324.088192] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.107971] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819761, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.333560] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.333862] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.337121] env[63241]: INFO nova.compute.claims [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1324.346865] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquiring lock "0440c0a8-f065-4a82-b190-33279e7c0d93" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.348786] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "0440c0a8-f065-4a82-b190-33279e7c0d93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.451442] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.451703] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1324.481458] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.601628] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819761, 'name': CreateVM_Task, 'duration_secs': 0.302519} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.601841] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1324.602816] env[63241]: DEBUG oslo_vmware.service [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7163ba-1f89-4196-91ce-653c4505bac3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.611664] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1324.611825] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.612518] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1324.612838] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ad1c3c8-ccae-4e87-a5ac-33a881612564 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.617528] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1324.617528] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a31414-4411-0769-4d90-ecd9dcba8475" [ 1324.617528] env[63241]: _type = "Task" [ 1324.617528] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.628849] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a31414-4411-0769-4d90-ecd9dcba8475, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.853915] env[63241]: DEBUG nova.compute.manager [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1325.127850] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.128127] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1325.128363] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.128509] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.129022] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.129261] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a1aff21-3884-4390-ab8b-004b613dda69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.146722] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.146860] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1325.147584] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2a881e-e7ca-426f-8903-f3d8d6cbf3bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.153853] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e87965df-6d9a-4533-bdb4-57da0bc235b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.158795] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1325.158795] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d4686f-9b35-022f-f479-92d01a163b26" [ 1325.158795] env[63241]: _type = "Task" [ 1325.158795] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.166064] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d4686f-9b35-022f-f479-92d01a163b26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.389388] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.445830] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7059451a-9932-4b72-8332-89973350b932 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.452817] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.452817] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1325.452817] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1325.458904] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffac7f73-efea-4557-920d-34a9ae1e4415 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.505770] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a68bb1-528d-4abf-9b83-2b6d6b2e7d44 {{(pid=63241) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.513606] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a3c9a9-8cf7-44e7-8ec6-fb97d1e0b19a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.528536] env[63241]: DEBUG nova.compute.provider_tree [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.676516] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Preparing fetch location {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1325.676516] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Creating directory with path [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1325.676516] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edf06010-7050-445b-9cde-d7b24639c601 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.704624] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Created directory with path [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1325.705173] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Fetch image to [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/tmp-sparse.vmdk {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1325.705173] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Downloading image file data e128f8d9-813d-4846-9a6e-b4c4717cd5b4 to [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/tmp-sparse.vmdk on the data store datastore1 {{(pid=63241) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1325.705878] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542248d9-3adf-4b0a-98b1-6fab9425f123 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.715846] env[63241]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daeffda0-7671-4a1b-bd3e-ca65e439881c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.729732] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91591f2c-040c-4eb4-afec-3ef9c86d6a7a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.772019] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f87645-b3bb-442b-8897-14c156f7ce6f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.777879] env[63241]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-33a19f50-3cd8-41ce-be55-dba3ce2cf6e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.869205] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Downloading image file data e128f8d9-813d-4846-9a6e-b4c4717cd5b4 to the data store datastore1 {{(pid=63241) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1325.950640] env[63241]: DEBUG oslo_vmware.rw_handles [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63241) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1326.011136] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Skipping network cache update for instance because it is Building. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1326.011279] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: b4182e53-50db-4256-b376-b00100778935] Skipping network cache update for instance because it is Building. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1326.011413] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1326.011630] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1326.032429] env[63241]: DEBUG nova.scheduler.client.report [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1326.249455] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquiring lock "780f3eee-f6c7-4054-8e6e-a370f74dc405" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.249761] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "780f3eee-f6c7-4054-8e6e-a370f74dc405" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.539168] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1326.539703] env[63241]: DEBUG nova.compute.manager [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1326.545942] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.062s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.545942] env[63241]: INFO nova.compute.claims [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1326.610363] env[63241]: DEBUG oslo_vmware.rw_handles [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Completed reading data from the image iterator. {{(pid=63241) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1326.610363] env[63241]: DEBUG oslo_vmware.rw_handles [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1326.747864] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Downloaded image file data e128f8d9-813d-4846-9a6e-b4c4717cd5b4 to vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/tmp-sparse.vmdk on the data store datastore1 {{(pid=63241) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1326.749464] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Caching image {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1326.750947] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Copying Virtual Disk [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/tmp-sparse.vmdk to [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1326.751266] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe5b4c5d-efdb-46f3-b79b-23bed4c320f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.758742] env[63241]: DEBUG nova.compute.manager [None 
req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1326.771821] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1326.771821] env[63241]: value = "task-1819766" [ 1326.771821] env[63241]: _type = "Task" [ 1326.771821] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.785410] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819766, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.051234] env[63241]: DEBUG nova.compute.utils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1327.052638] env[63241]: DEBUG nova.compute.manager [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Not allocating networking since 'none' was specified. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1327.305143] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819766, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.315274] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.451857] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.555377] env[63241]: DEBUG nova.compute.manager [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1327.694647] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b32075-fc12-453e-a191-b9b053ee07aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.701808] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921da357-d905-4ecb-9f2d-08396fef012f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.738031] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6444545b-7423-4948-8f59-4ed1116c2273 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.745890] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcaa9f91-ba68-445c-8ab5-497ca96d1a9d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.761974] env[63241]: DEBUG nova.compute.provider_tree [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.784488] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.69064} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.784488] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Copied Virtual Disk [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/tmp-sparse.vmdk to [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1327.784580] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Deleting the datastore file [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/tmp-sparse.vmdk {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1327.785494] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78907e07-9c8a-4ad5-9047-bb2341fd8c24 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.797093] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1327.797093] env[63241]: value = "task-1819767" [ 1327.797093] env[63241]: _type = "Task" [ 1327.797093] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.806438] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819767, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.268348] env[63241]: DEBUG nova.scheduler.client.report [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1328.326052] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819767, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026154} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.326361] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1328.326831] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Moving file from [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081/e128f8d9-813d-4846-9a6e-b4c4717cd5b4 to [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4. {{(pid=63241) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1328.327267] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-a7e187a1-75ae-4ce0-8977-572a8d5636ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.349161] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1328.349161] env[63241]: value = "task-1819768" [ 1328.349161] env[63241]: _type = "Task" [ 1328.349161] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.366894] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819768, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.566638] env[63241]: DEBUG nova.compute.manager [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1328.595746] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1328.596082] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1328.596210] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1328.596548] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1328.596548] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1328.596678] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1328.596886] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1328.597260] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1328.597530] env[63241]: DEBUG nova.virt.hardware [None 
req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1328.598371] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1328.598543] env[63241]: DEBUG nova.virt.hardware [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1328.600428] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b67515a-6b3e-4c8b-be18-002e251af7ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.611542] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91b4615-f368-4f25-b4d3-66d90d4b7e61 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.637983] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1328.644794] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Creating folder: Project (c82375b652614784b5cef5ee3b58641f). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1328.645139] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c34de31e-b1a8-4911-af69-7b82ec670707 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.658241] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Created folder: Project (c82375b652614784b5cef5ee3b58641f) in parent group-v376927. [ 1328.658241] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Creating folder: Instances. Parent ref: group-v376934. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1328.658340] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79addd05-463d-4443-8e0f-aead1bf27cd8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.670449] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Created folder: Instances in parent group-v376934. [ 1328.674270] env[63241]: DEBUG oslo.service.loopingcall [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1328.674270] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4182e53-50db-4256-b376-b00100778935] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1328.674270] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aadfa4c9-f869-416f-bff8-99d470980f9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.691936] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1328.691936] env[63241]: value = "task-1819772" [ 1328.691936] env[63241]: _type = "Task" [ 1328.691936] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.700225] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819772, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.779025] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.779025] env[63241]: DEBUG nova.compute.manager [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1328.780108] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.391s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.782910] env[63241]: INFO nova.compute.claims [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1328.860921] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819768, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.030898} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.861162] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] File moved {{(pid=63241) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1328.861446] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Cleaning up location [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1328.861608] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Deleting the datastore file [datastore1] vmware_temp/72156f07-1f40-4914-92b6-a5b6120a0081 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1328.861864] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca1181f3-762b-41d0-ab23-3e96c10fd771 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.870344] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1328.870344] env[63241]: value = "task-1819773" [ 1328.870344] env[63241]: _type = "Task" [ 1328.870344] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.881346] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819773, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.204418] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819772, 'name': CreateVM_Task, 'duration_secs': 0.330139} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.204603] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4182e53-50db-4256-b376-b00100778935] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1329.205252] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.205417] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.205734] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1329.206024] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fc356ed-4228-4d95-8716-1387ffe07504 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.210644] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1329.210644] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522853a4-376c-abb1-012f-b21a96f2e15c" [ 1329.210644] env[63241]: _type = "Task" [ 1329.210644] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.222109] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522853a4-376c-abb1-012f-b21a96f2e15c, 'name': SearchDatastore_Task} progress is 0%. 
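The entries above repeat one pattern: a vSphere method that returns a task (MoveDatastoreFile_Task, CreateVM_Task, ...) is invoked through the oslo.vmware session, and wait_for_task then polls it, producing the "Task: {'id': task-..., ...} progress is N%" lines until the task completes. A minimal sketch of that pattern using oslo.vmware's public API; the vCenter host, credentials, folder/config/pool references and poll interval below are placeholders, not values taken from this log.

    from oslo_vmware import api as vmware_api

    # Placeholder connection details; the real driver builds this session from
    # nova.conf ([vmware] host_ip, host_username, host_password, ...).
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5)   # seconds between the "progress is N%" polls

    def create_vm(folder_ref, config_spec, respool_ref):
        # Folder.CreateVM_Task returns a task reference immediately ...
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                      config=config_spec, pool=respool_ref)
        # ... and wait_for_task polls it until it succeeds (or raises on error),
        # which is what the repeated _poll_task log lines correspond to.
        task_info = session.wait_for_task(task_ref)
        return task_info.result   # the managed object reference of the new VM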
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.293320] env[63241]: DEBUG nova.compute.utils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1329.293320] env[63241]: DEBUG nova.compute.manager [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1329.293320] env[63241]: DEBUG nova.network.neutron [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1329.387638] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819773, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.058963} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.387638] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1329.388103] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13b40996-1014-4967-b520-006babd0953e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.398881] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1329.398881] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5257ecaa-11c3-1c46-2050-9b50d8890e4d" [ 1329.398881] env[63241]: _type = "Task" [ 1329.398881] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.421111] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5257ecaa-11c3-1c46-2050-9b50d8890e4d, 'name': SearchDatastore_Task, 'duration_secs': 0.009301} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.421375] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.423022] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 69c73342-258a-4b00-ba1b-ffdd5f247890/69c73342-258a-4b00-ba1b-ffdd5f247890.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1329.423022] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8a7e204-dc88-4247-b47a-d9f748de91f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.430584] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1329.430584] env[63241]: value = "task-1819774" [ 1329.430584] env[63241]: _type = "Task" [ 1329.430584] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.442250] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819774, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.522108] env[63241]: DEBUG nova.policy [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbaf926b7294426ea90de8c089597ec0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '391c095b46d94ceb97fb48dcddf60d94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1329.725974] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522853a4-376c-abb1-012f-b21a96f2e15c, 'name': SearchDatastore_Task, 'duration_secs': 0.011009} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.726653] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.726653] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1329.726653] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.727762] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.728035] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1329.728814] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91ca06f3-f311-4a11-81f7-6f3221a96d0b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.744769] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1329.745040] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Folder [datastore1] devstack-image-cache_base created. 
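The Acquiring lock / Acquired lock / Acquired external semaphore lines around the cached image path come from oslo.concurrency's lockutils: the per-image critical section is guarded by an in-process semaphore and, with external=True, by a file-based inter-process lock as well. A minimal sketch of that pattern with the lock name copied from the log; the lock_path and the body of the critical section are placeholders.

    from oslo_concurrency import lockutils

    CACHED_VMDK = ('[datastore1] devstack-image-cache_base/'
                   'e128f8d9-813d-4846-9a6e-b4c4717cd5b4/'
                   'e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk')

    # external=True layers a file lock on top of the in-process semaphore (the
    # "Acquired external semaphore" line), so other workers on the same host
    # serialize on the cached image too; lock_path is normally configured via
    # [oslo_concurrency] lock_path rather than passed inline.
    with lockutils.lock(CACHED_VMDK, lock_file_prefix='nova-', external=True,
                        lock_path='/tmp'):
        pass  # placeholder: check/fetch/copy the cached image while holding the lock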
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1329.745899] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18420cd9-3b56-4fea-9079-9932e4fd273d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.754858] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1329.754858] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52267105-1512-9f25-d461-4014d8d0afc1" [ 1329.754858] env[63241]: _type = "Task" [ 1329.754858] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.765342] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52267105-1512-9f25-d461-4014d8d0afc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.800990] env[63241]: DEBUG nova.compute.manager [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1329.948141] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819774, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.428843} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.948781] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 69c73342-258a-4b00-ba1b-ffdd5f247890/69c73342-258a-4b00-ba1b-ffdd5f247890.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1329.949056] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1329.949328] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb28c510-d99e-41bf-a842-14d8fe387441 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.956668] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1329.956668] env[63241]: value = "task-1819775" [ 1329.956668] env[63241]: _type = "Task" [ 1329.956668] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.968683] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819775, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.973105] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24949fb2-06f0-4c1e-a8da-b4586be23ab5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.979738] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b94b4df-b794-4930-9650-5df128f4b0d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.015611] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8e655d-ff96-48d5-9a16-468ec381c059 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.024206] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d76c1a-f737-4709-a012-cb145b8d68f4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.039674] env[63241]: DEBUG nova.compute.provider_tree [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1330.084021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "a1a8342a-b00e-42c1-8c01-a95659a78caf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.084268] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "a1a8342a-b00e-42c1-8c01-a95659a78caf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.268261] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52267105-1512-9f25-d461-4014d8d0afc1, 'name': SearchDatastore_Task, 'duration_secs': 0.045275} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.268942] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-223e4b03-fb06-4dd8-aedf-52d2d5a42e35 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.274241] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1330.274241] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c98d58-0549-5a8b-4f58-416906e54843" [ 1330.274241] env[63241]: _type = "Task" [ 1330.274241] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.282272] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c98d58-0549-5a8b-4f58-416906e54843, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.452378] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.452378] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.452702] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.470809] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819775, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079458} completed successfully. 
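Task-1819774 and task-1819775 above are the two-step root-disk preparation: CopyVirtualDisk_Task clones the cached image next to the instance, and ExtendVirtualDisk_Task grows the copy to the flavor's root disk (1048576 KB, i.e. the m1.nano root_gb of 1). A hedged sketch of driving those two VirtualDiskManager calls through the same kind of oslo.vmware session; session and dc_ref (the datacenter managed object) are assumed to already exist, and the default paths simply echo the ones in the log.

    def copy_and_extend_root_disk(session, dc_ref,
                                  src=('[datastore1] devstack-image-cache_base/'
                                       'e128f8d9-813d-4846-9a6e-b4c4717cd5b4/'
                                       'e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk'),
                                  dst=('[datastore1] 69c73342-258a-4b00-ba1b-ffdd5f247890/'
                                       '69c73342-258a-4b00-ba1b-ffdd5f247890.vmdk'),
                                  new_capacity_kb=1048576):
        """Clone the cached image beside the instance, then grow it to flavor size."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        copy_task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=src, sourceDatacenter=dc_ref,
            destName=dst, destDatacenter=dc_ref)
        session.wait_for_task(copy_task)       # the CopyVirtualDisk_Task entries
        extend_task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
            name=dst, datacenter=dc_ref,
            newCapacityKb=new_capacity_kb, eagerZero=False)
        session.wait_for_task(extend_task)     # the ExtendVirtualDisk_Task entries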
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.471115] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1330.472598] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8456ec93-d081-4892-919d-bf8ce2aa01e5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.498111] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 69c73342-258a-4b00-ba1b-ffdd5f247890/69c73342-258a-4b00-ba1b-ffdd5f247890.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1330.498785] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5ee2858-6c66-483e-89a2-e227cfd272ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.522079] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1330.522079] env[63241]: value = "task-1819777" [ 1330.522079] env[63241]: _type = "Task" [ 1330.522079] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.532159] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819777, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.569726] env[63241]: ERROR nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [req-0920222e-bbac-438b-b802-470453cf5413] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0920222e-bbac-438b-b802-470453cf5413"}]} [ 1330.588700] env[63241]: DEBUG nova.compute.manager [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1330.593672] env[63241]: DEBUG nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1330.615657] env[63241]: DEBUG nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1330.615924] env[63241]: DEBUG nova.compute.provider_tree [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1330.639810] env[63241]: DEBUG nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1330.667085] env[63241]: DEBUG nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1330.800551] env[63241]: DEBUG oslo_vmware.api [None 
req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c98d58-0549-5a8b-4f58-416906e54843, 'name': SearchDatastore_Task, 'duration_secs': 0.008509} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.801528] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.801528] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1330.801528] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45530ef7-295b-4184-ac6b-ca0b1b9f4974 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.809152] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1332d9fb-2b19-4d9c-80e9-963ba590e142 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.813141] env[63241]: DEBUG nova.compute.manager [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1330.817949] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1330.817949] env[63241]: value = "task-1819778" [ 1330.817949] env[63241]: _type = "Task" [ 1330.817949] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.827618] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819778, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.830630] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e1dcc0-7cdc-4402-a97f-1fd6f9a036b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.864575] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995958b8-e5cb-4ef0-8d14-57da6661fec1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.872770] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436d242b-f3df-4a9a-a0e7-a69925687a4d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.879860] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1330.881270] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1330.881270] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1330.881270] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1330.881270] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1330.881270] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1330.881742] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1330.881742] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1330.881742] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1330.881742] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1330.881901] env[63241]: DEBUG nova.virt.hardware [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1330.882687] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888953b7-86e7-4b17-82fe-40325dbf487e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.897187] env[63241]: DEBUG nova.compute.provider_tree [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1330.906421] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cd46a5-9b39-477a-851b-7290ca60f106 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.038865] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819777, 'name': ReconfigVM_Task} progress is 14%. 
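The Flavor/Image limits 0:0:0 and the 65536 maxima above mean neither the m1.nano flavor nor the cirros image constrains the guest CPU topology, so for a single vCPU the only factorization is sockets=1, cores=1, threads=1, which is why the log reports exactly one possible topology. A small illustrative check of that arithmetic (not Nova's implementation):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Every (sockets, cores, threads) whose product is exactly vcpus."""
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))   # -> [(1, 1, 1)], matching "Got 1 possible topologies"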
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.134302] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.244353] env[63241]: DEBUG nova.network.neutron [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Successfully created port: 2295e83d-9394-4f35-be55-49b2eb1f271b {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1331.328502] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819778, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465553} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.328755] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1331.328919] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1331.329186] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fbe7e99a-32d5-4937-ad15-0bea6eab6292 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.338318] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1331.338318] env[63241]: value = "task-1819779" [ 1331.338318] env[63241]: _type = "Task" [ 1331.338318] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.354305] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819779, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.427741] env[63241]: ERROR nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [req-5d03bf3f-8da8-446b-b223-0664613e6322] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5d03bf3f-8da8-446b-b223-0664613e6322"}]} [ 1331.460897] env[63241]: DEBUG nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1331.537672] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819777, 'name': ReconfigVM_Task, 'duration_secs': 0.899811} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.537672] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 69c73342-258a-4b00-ba1b-ffdd5f247890/69c73342-258a-4b00-ba1b-ffdd5f247890.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1331.537672] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4bd98b3-6702-46ff-814a-770932612a4b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.546030] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1331.546030] env[63241]: value = "task-1819780" [ 1331.546030] env[63241]: _type = "Task" [ 1331.546030] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.556041] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819780, 'name': Rename_Task} progress is 5%. 
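The ERROR nova.scheduler.client.report entries above are not fatal: placement answered 409 placement.concurrent_update because the resource provider's generation changed between the report client's read and its write, so the client refreshes inventories, aggregates and traits (the surrounding "Refreshing ..." lines) and retries the update. A schematic sketch of that conditional-update pattern against the placement REST API; it uses plain requests with a placeholder endpoint and token, whereas Nova's real SchedulerReportClient goes through keystoneauth sessions.

    import requests

    def put_inventories(placement_url, token, rp_uuid, inventories, attempts=3):
        """PUT provider inventories, re-reading the generation after a 409 conflict."""
        headers = {'x-auth-token': token}
        url = '%s/resource_providers/%s/inventories' % (placement_url, rp_uuid)
        for _ in range(attempts):
            # read the provider's current generation ...
            current = requests.get(url, headers=headers).json()
            body = {'resource_provider_generation':
                        current['resource_provider_generation'],
                    'inventories': inventories}
            # ... and write conditionally on it
            resp = requests.put(url, json=body, headers=headers)
            if resp.status_code != 409:
                return resp
            # 409 placement.concurrent_update: another writer bumped the
            # generation in between, so loop, refresh and try again
        resp.raise_for_status()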
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.557534] env[63241]: DEBUG nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1331.557763] env[63241]: DEBUG nova.compute.provider_tree [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1331.577046] env[63241]: DEBUG nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1331.605422] env[63241]: DEBUG nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1331.728605] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "0e5447fd-a04f-4bc2-b329-e015883773b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.728605] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "0e5447fd-a04f-4bc2-b329-e015883773b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.852928] env[63241]: DEBUG 
oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819779, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062114} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.853249] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1331.854094] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba961c1-da8f-4e73-ae4c-98c22ed646fc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.884985] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1331.885350] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39b03911-6785-43d6-90fc-6f4dfbc0b1bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.903083] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2993622c-a5d8-4e2a-a551-649d5db7a39e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.914170] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f3f7fc-9ce6-4cd6-a03b-26c26cf14ca4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.918590] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1331.918590] env[63241]: value = "task-1819781" [ 1331.918590] env[63241]: _type = "Task" [ 1331.918590] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.951706] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c1f389-60b8-428c-a485-81e8c40f25bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.960149] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819781, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.966622] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46430962-c526-4125-ba93-213fbf178971 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.989966] env[63241]: DEBUG nova.compute.provider_tree [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1332.057420] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819780, 'name': Rename_Task, 'duration_secs': 0.148581} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.058751] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1332.058751] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c82bf5b5-540f-43cb-bb06-4d63cc9f524e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.066966] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1332.066966] env[63241]: value = "task-1819782" [ 1332.066966] env[63241]: _type = "Task" [ 1332.066966] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.082193] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819782, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.233663] env[63241]: DEBUG nova.compute.manager [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1332.439805] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819781, 'name': ReconfigVM_Task, 'duration_secs': 0.327789} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.440157] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Reconfigured VM instance instance-00000002 to attach disk [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1332.440831] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d528d031-81a7-48ad-86f6-1815ddb412cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.450507] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1332.450507] env[63241]: value = "task-1819783" [ 1332.450507] env[63241]: _type = "Task" [ 1332.450507] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.468150] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819783, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.556535] env[63241]: DEBUG nova.scheduler.client.report [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 11 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1332.556535] env[63241]: DEBUG nova.compute.provider_tree [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 11 to 12 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1332.556717] env[63241]: DEBUG nova.compute.provider_tree [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1332.592940] env[63241]: DEBUG oslo_vmware.api [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819782, 'name': PowerOnVM_Task, 'duration_secs': 0.483977} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.594339] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1332.595010] env[63241]: INFO nova.compute.manager [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Took 8.88 seconds to spawn the instance on the hypervisor. 
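The 409 above is Placement's optimistic-concurrency check in action: the inventory PUT for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b carried a stale resource provider generation, so the report client re-reads the provider's inventories, aggregates and traits and retries, which is why the generation moves from 11 to 12 a few entries later. Below is a minimal sketch of that refresh-and-retry pattern against the Placement HTTP API; PLACEMENT_URL and the token are placeholders, the payload shape simply mirrors the inventory dicts in the log, and this is an illustration rather than Nova's actual report client.

import requests

PLACEMENT_URL = "http://placement.example/resource_providers"   # placeholder
HEADERS = {"X-Auth-Token": "TOKEN",                              # placeholder
           "OpenStack-API-Version": "placement 1.26"}

def put_inventories(rp_uuid, inventories, retries=3):
    """Retry an inventory PUT when Placement reports a generation conflict."""
    for _ in range(retries):
        # Re-read the provider to pick up its current generation.
        rp = requests.get(f"{PLACEMENT_URL}/{rp_uuid}", headers=HEADERS).json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = requests.put(f"{PLACEMENT_URL}/{rp_uuid}/inventories",
                            headers=HEADERS, json=body)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 with code "placement.concurrent_update": another writer bumped
        # the generation first; loop and retry with a fresh one.
    raise RuntimeError("inventory update kept conflicting")
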
[ 1332.595474] env[63241]: DEBUG nova.compute.manager [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1332.597211] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f0519b-f475-4267-8ac9-4952ac81049c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.771044] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.975292] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819783, 'name': Rename_Task, 'duration_secs': 0.18983} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.975614] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1332.975901] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-525f92d5-0c53-425c-9362-cdbd0d27af72 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.987466] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1332.987466] env[63241]: value = "task-1819785" [ 1332.987466] env[63241]: _type = "Task" [ 1332.987466] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.001295] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819785, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.070562] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.290s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.071231] env[63241]: DEBUG nova.compute.manager [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1333.073867] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.759s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.077871] env[63241]: INFO nova.compute.claims [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1333.127889] env[63241]: INFO nova.compute.manager [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Took 13.61 seconds to build instance. [ 1333.448662] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.499295] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819785, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.581532] env[63241]: DEBUG nova.compute.utils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1333.589458] env[63241]: DEBUG nova.compute.manager [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1333.589727] env[63241]: DEBUG nova.network.neutron [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1333.631482] env[63241]: DEBUG oslo_concurrency.lockutils [None req-12ebed9d-fb90-475b-99dc-d5f2f351afc3 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "69c73342-258a-4b00-ba1b-ffdd5f247890" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.125s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.800209] env[63241]: DEBUG nova.policy [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f188015a3fc54ec0bf23ee8077a0b5a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a98c5e44c88f4470a83a16d1ba190a47', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1334.001199] env[63241]: DEBUG oslo_vmware.api [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819785, 'name': PowerOnVM_Task, 'duration_secs': 0.857996} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.001914] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1334.001914] env[63241]: INFO nova.compute.manager [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Took 5.43 seconds to spawn the instance on the hypervisor. 
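The ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same invoke-then-poll shape: a vCenter *_Task method is called, and the returned task is polled until it either completes (the "completed successfully" lines with duration_secs) or fails, with the intermediate "progress is N%" lines coming from each poll. The loop below is only a generic sketch of that pattern; the names are illustrative and are not oslo.vmware's real API.

import time

def wait_for_task(poll, interval=0.5, timeout=60):
    """Poll `poll()` until the task succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll()            # e.g. ("running", 64)
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)                # a real client logs "progress is N%" here
    raise TimeoutError("task did not complete in time")

# Toy usage: a fake task that completes on the third poll.
states = iter([("running", 0), ("running", 64), ("success", 100)])
wait_for_task(lambda: next(states), interval=0.01)
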
[ 1334.001914] env[63241]: DEBUG nova.compute.manager [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1334.002851] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7647ea8-f825-4a36-b498-9e6b4bf4944f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.092508] env[63241]: DEBUG nova.compute.manager [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1334.259900] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6f16cf-b3d9-4ae3-866c-0414d346fbc2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.272431] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a43555-9e95-4f93-8657-f9059a70fd7a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.325994] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95eb34f7-9f0c-4edd-a971-408195a515e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.336599] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ea73c5-de07-44e4-af37-e2f50dbceed1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.353044] env[63241]: DEBUG nova.compute.provider_tree [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1334.402773] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.403839] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.453027] env[63241]: DEBUG oslo_service.periodic_task [None 
req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.524268] env[63241]: INFO nova.compute.manager [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Took 10.22 seconds to build instance. [ 1334.859242] env[63241]: DEBUG nova.scheduler.client.report [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1334.910662] env[63241]: DEBUG nova.compute.manager [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1334.956802] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.028052] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ed337124-9b7d-428e-9c14-683ed7a5f7ed tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "b4182e53-50db-4256-b376-b00100778935" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.735s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.105511] env[63241]: DEBUG nova.compute.manager [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1335.146225] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1335.146730] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1335.146928] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1335.147114] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1335.147266] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1335.147514] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1335.147685] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1335.147794] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1335.147971] 
env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1335.148165] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1335.148356] env[63241]: DEBUG nova.virt.hardware [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1335.149437] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f29f6be-4fe5-4a50-986b-3594d5f8e963 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.162220] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6920d5ce-3d0a-4ba3-ad11-1284d63ffda8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.365939] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.292s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.366388] env[63241]: DEBUG nova.compute.manager [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1335.375465] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.241s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.377276] env[63241]: INFO nova.compute.claims [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1335.451363] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.881736] env[63241]: DEBUG nova.compute.utils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1335.888600] env[63241]: DEBUG nova.compute.manager [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1335.888743] env[63241]: DEBUG nova.network.neutron [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1335.958936] env[63241]: DEBUG nova.compute.manager [None req-2fc739c8-dd40-4bcc-b413-ea96ec6f9ebb tempest-ServerDiagnosticsV248Test-797217895 tempest-ServerDiagnosticsV248Test-797217895-project-admin] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1335.961970] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f853b194-a427-4999-978a-0ed0529f0b5b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.974796] env[63241]: INFO nova.compute.manager [None req-2fc739c8-dd40-4bcc-b413-ea96ec6f9ebb tempest-ServerDiagnosticsV248Test-797217895 tempest-ServerDiagnosticsV248Test-797217895-project-admin] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Retrieving diagnostics [ 1335.975182] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e762b25d-6520-4363-947c-00c932eef4e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.080761] env[63241]: DEBUG nova.network.neutron [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Successfully updated port: 2295e83d-9394-4f35-be55-49b2eb1f271b {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1336.121184] env[63241]: DEBUG nova.policy [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a63d572477824df3b5411d1c77e4148f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '06bbbe738ef34806971a4883b7bb3cc4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1336.373484] env[63241]: DEBUG nova.network.neutron [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Successfully created port: 7168ddd9-eca4-4ba1-a734-ef4f493aa646 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1336.393319] env[63241]: DEBUG nova.compute.manager [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1336.585502] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "refresh_cache-bbb94f08-7df2-457e-bc5b-d0008839cf20" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.585502] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquired lock "refresh_cache-bbb94f08-7df2-457e-bc5b-d0008839cf20" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.585719] env[63241]: DEBUG nova.network.neutron [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1336.592173] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb13e699-3d6e-4df5-9b41-0ff251018775 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.605820] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f64156-6fe6-4d8f-b577-61b6ccfde799 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.637328] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9873b6-3355-40e8-8f78-1695f4a60cbb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.650497] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f41274-b591-4092-9b67-f01e3385fc3b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.662057] env[63241]: DEBUG nova.compute.provider_tree [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1337.166714] env[63241]: DEBUG nova.scheduler.client.report [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1337.231059] env[63241]: DEBUG nova.network.neutron [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 
tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1337.413316] env[63241]: DEBUG nova.compute.manager [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1337.442967] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1337.445326] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1337.445326] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1337.445326] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1337.445326] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1337.445326] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1337.445640] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 
tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1337.446014] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1337.446303] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1337.446872] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1337.446872] env[63241]: DEBUG nova.virt.hardware [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1337.447997] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5ab92c-a389-4b64-9b01-357c0f817c64 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.460655] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bac70b1-d1a6-42ca-9490-847036849e29 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.675884] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.675884] env[63241]: DEBUG nova.compute.manager [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1337.682135] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.911s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.685190] env[63241]: INFO nova.compute.claims [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1337.946288] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquiring lock "97890eda-0c1d-4423-acd2-60d3097c6f8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.946577] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "97890eda-0c1d-4423-acd2-60d3097c6f8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.064748] env[63241]: INFO nova.compute.manager [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Rebuilding instance [ 1338.137187] env[63241]: DEBUG nova.compute.manager [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1338.138108] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bf8b02-bdf7-4a13-801a-10c7a1c3cdae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.168174] env[63241]: DEBUG nova.network.neutron [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Successfully created port: 39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1338.203215] env[63241]: DEBUG nova.compute.utils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1338.204706] env[63241]: DEBUG nova.compute.manager [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 
a1a8342a-b00e-42c1-8c01-a95659a78caf] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1338.204889] env[63241]: DEBUG nova.network.neutron [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1338.210898] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.211199] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.219038] env[63241]: DEBUG nova.compute.manager [req-5d7ce320-f69c-4f33-bce0-d4c5248a7692 req-8e17482e-bcae-4aee-a1e0-532e48346290 service nova] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Received event network-vif-plugged-2295e83d-9394-4f35-be55-49b2eb1f271b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1338.219252] env[63241]: DEBUG oslo_concurrency.lockutils [req-5d7ce320-f69c-4f33-bce0-d4c5248a7692 req-8e17482e-bcae-4aee-a1e0-532e48346290 service nova] Acquiring lock "bbb94f08-7df2-457e-bc5b-d0008839cf20-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.219452] env[63241]: DEBUG oslo_concurrency.lockutils [req-5d7ce320-f69c-4f33-bce0-d4c5248a7692 req-8e17482e-bcae-4aee-a1e0-532e48346290 service nova] Lock "bbb94f08-7df2-457e-bc5b-d0008839cf20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.219611] env[63241]: DEBUG oslo_concurrency.lockutils [req-5d7ce320-f69c-4f33-bce0-d4c5248a7692 req-8e17482e-bcae-4aee-a1e0-532e48346290 service nova] Lock "bbb94f08-7df2-457e-bc5b-d0008839cf20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.219774] env[63241]: DEBUG nova.compute.manager [req-5d7ce320-f69c-4f33-bce0-d4c5248a7692 req-8e17482e-bcae-4aee-a1e0-532e48346290 service nova] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] No waiting events found dispatching network-vif-plugged-2295e83d-9394-4f35-be55-49b2eb1f271b {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1338.219931] env[63241]: WARNING nova.compute.manager [req-5d7ce320-f69c-4f33-bce0-d4c5248a7692 
req-8e17482e-bcae-4aee-a1e0-532e48346290 service nova] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Received unexpected event network-vif-plugged-2295e83d-9394-4f35-be55-49b2eb1f271b for instance with vm_state building and task_state spawning. [ 1338.323743] env[63241]: DEBUG nova.network.neutron [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Updating instance_info_cache with network_info: [{"id": "2295e83d-9394-4f35-be55-49b2eb1f271b", "address": "fa:16:3e:b5:4c:33", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2295e83d-93", "ovs_interfaceid": "2295e83d-9394-4f35-be55-49b2eb1f271b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.344714] env[63241]: DEBUG nova.policy [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f5b40d7cf04b3d8702df00367b22a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38c709b68d2a40049d6d4795267987d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1338.452830] env[63241]: DEBUG nova.compute.manager [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1338.660821] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1338.661225] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7c5c57e-1337-42b5-8e3f-9af1a70621fc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.669512] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1338.669512] env[63241]: value = "task-1819788" [ 1338.669512] env[63241]: _type = "Task" [ 1338.669512] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.680198] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819788, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.706783] env[63241]: DEBUG nova.compute.manager [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1338.716575] env[63241]: DEBUG nova.compute.manager [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1338.833014] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Releasing lock "refresh_cache-bbb94f08-7df2-457e-bc5b-d0008839cf20" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.833014] env[63241]: DEBUG nova.compute.manager [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Instance network_info: |[{"id": "2295e83d-9394-4f35-be55-49b2eb1f271b", "address": "fa:16:3e:b5:4c:33", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2295e83d-93", "ovs_interfaceid": "2295e83d-9394-4f35-be55-49b2eb1f271b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1338.833225] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:4c:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2295e83d-9394-4f35-be55-49b2eb1f271b', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1338.842863] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Creating folder: Project (391c095b46d94ceb97fb48dcddf60d94). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1338.843882] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8fbfcf4-ec64-4ab0-9435-b39572baecc1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.862030] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Created folder: Project (391c095b46d94ceb97fb48dcddf60d94) in parent group-v376927. [ 1338.862030] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Creating folder: Instances. Parent ref: group-v376938. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1338.862030] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-251b9ee5-050c-4238-a8e6-db33b12e95be {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.871271] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Created folder: Instances in parent group-v376938. [ 1338.871631] env[63241]: DEBUG oslo.service.loopingcall [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1338.871847] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1338.872783] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f7186f1-bcb7-48c0-b108-d5dabe28ad37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.905251] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1338.905251] env[63241]: value = "task-1819791" [ 1338.905251] env[63241]: _type = "Task" [ 1338.905251] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.915175] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819791, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.955471] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aab5f02-bbf1-433c-a04e-481b5d54047b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.973015] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae256df3-9a19-4bc0-9a9d-8e601fbb761a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.022976] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.022976] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0944388-305e-4e1c-b56c-f3e6910d17b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.031466] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d765913-03a2-4ef5-aad2-63697c955c25 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.050689] env[63241]: DEBUG nova.compute.provider_tree [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1339.184588] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819788, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.250843] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.419957] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819791, 'name': CreateVM_Task, 'duration_secs': 0.428559} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.420149] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1339.532366] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.533860] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.533860] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1339.533860] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85d89d43-52f3-421a-9325-4302fb4b209c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.539857] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1339.539857] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52eea914-bb4a-a0bd-424b-c5252d29bc26" [ 1339.539857] env[63241]: _type = "Task" [ 1339.539857] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.547792] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52eea914-bb4a-a0bd-424b-c5252d29bc26, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.553859] env[63241]: DEBUG nova.scheduler.client.report [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1339.682158] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819788, 'name': PowerOffVM_Task, 'duration_secs': 0.942366} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.682816] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1339.682816] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1339.683820] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1571713c-6842-4b40-aef0-386de2277a1a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.691499] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1339.691606] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7c45d64-4d15-41b9-81a5-e7c4722cbce8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.716378] env[63241]: DEBUG nova.compute.manager [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1339.720854] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1339.721352] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1339.721352] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Deleting the datastore file [datastore1] b4182e53-50db-4256-b376-b00100778935 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1339.721738] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e06f783-2cdb-4448-a59b-15dfc8b76b20 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.728513] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1339.728513] env[63241]: value = "task-1819794" [ 1339.728513] env[63241]: _type = "Task" [ 1339.728513] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.737965] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819794, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.761752] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1339.761752] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1339.761941] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.762012] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1339.764959] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.764959] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1339.764959] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1339.764959] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1339.764959] env[63241]: DEBUG nova.virt.hardware [None 
req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1339.765506] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1339.765506] env[63241]: DEBUG nova.virt.hardware [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1339.765506] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5342064-7b97-41db-a594-69f529f9aa0b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.773685] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ad7a5b-8818-4d32-ac43-0ac6871a7a4d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.952781] env[63241]: DEBUG nova.network.neutron [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Successfully created port: bc8209bb-1cd1-4efc-806c-3fb04ffc73c5 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1340.052333] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52eea914-bb4a-a0bd-424b-c5252d29bc26, 'name': SearchDatastore_Task, 'duration_secs': 0.008769} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.052333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.052865] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1340.052865] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.053175] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.053175] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1340.053385] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5076658-32fd-499b-ac2c-9cbd8c99177c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.058764] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.059267] env[63241]: DEBUG nova.compute.manager [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1340.062230] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.105s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.062230] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.062230] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1340.062382] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.611s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.063834] env[63241]: INFO nova.compute.claims [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1340.067096] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81383f9-dcdb-4f11-9975-7134710856a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.075503] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1340.075696] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1340.082187] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69fc39ca-c164-4965-85d8-9053b641d748 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.095024] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f433b5-b7dc-4130-840e-28608cab42ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.102900] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1340.102900] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a8e05b-543a-2769-b896-425e26ef5629" [ 1340.102900] env[63241]: _type = "Task" [ 1340.102900] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.119158] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bbb2b2-f423-47d3-9cc8-93db518d231b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.125751] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a8e05b-543a-2769-b896-425e26ef5629, 'name': SearchDatastore_Task, 'duration_secs': 0.010257} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.128000] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d54ae1f-6fab-479d-b5fa-d6f3ad9178dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.133498] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5101037c-be82-4327-ae8a-353b85c4daaf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.138942] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1340.138942] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f1b057-80e2-f617-6486-0c649fdb9972" [ 1340.138942] env[63241]: _type = "Task" [ 1340.138942] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.167598] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181348MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1340.167598] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.173761] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f1b057-80e2-f617-6486-0c649fdb9972, 'name': SearchDatastore_Task, 'duration_secs': 0.010674} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.173935] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.174204] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] bbb94f08-7df2-457e-bc5b-d0008839cf20/bbb94f08-7df2-457e-bc5b-d0008839cf20.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1340.174456] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc6b3d6c-9d89-4eb7-8dab-4c49bba6a492 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.183020] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1340.183020] env[63241]: value = "task-1819795" [ 1340.183020] env[63241]: _type = "Task" [ 1340.183020] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.190974] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819795, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.229312] env[63241]: DEBUG nova.network.neutron [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Successfully updated port: 7168ddd9-eca4-4ba1-a734-ef4f493aa646 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1340.240272] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819794, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198337} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.240385] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1340.240481] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1340.240654] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1340.570385] env[63241]: DEBUG nova.compute.utils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1340.571851] env[63241]: DEBUG nova.compute.manager [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1340.572224] env[63241]: DEBUG nova.network.neutron [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1340.695811] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819795, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.735282] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquiring lock "refresh_cache-0440c0a8-f065-4a82-b190-33279e7c0d93" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.739020] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquired lock "refresh_cache-0440c0a8-f065-4a82-b190-33279e7c0d93" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.739020] env[63241]: DEBUG nova.network.neutron [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1340.767086] env[63241]: DEBUG nova.policy [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f5b40d7cf04b3d8702df00367b22a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38c709b68d2a40049d6d4795267987d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1341.082412] env[63241]: DEBUG nova.compute.manager [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1341.100973] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "44508cc6-c576-4c30-8559-75118ceba02a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.100973] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "44508cc6-c576-4c30-8559-75118ceba02a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.197806] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819795, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.907123} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.200481] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] bbb94f08-7df2-457e-bc5b-d0008839cf20/bbb94f08-7df2-457e-bc5b-d0008839cf20.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1341.200695] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1341.201710] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fcc259b3-ce43-48e6-86cb-619af022fbb3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.210572] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1341.210572] env[63241]: value = "task-1819796" [ 1341.210572] env[63241]: _type = "Task" [ 1341.210572] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.231749] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819796, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.296078] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1341.296413] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1341.296553] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.296740] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1341.296880] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.297087] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1341.297339] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1341.297495] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1341.298290] env[63241]: DEBUG nova.virt.hardware [None 
req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1341.298493] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1341.299906] env[63241]: DEBUG nova.virt.hardware [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1341.300823] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674f8740-aa62-4465-bafb-f3ef817a5be6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.314725] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101d7086-4296-4340-baf2-401300fa1d3d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.319682] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b592eea4-7bfe-4500-b44c-bde6e4588d41 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.340428] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1341.346975] env[63241]: DEBUG oslo.service.loopingcall [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1341.347789] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4182e53-50db-4256-b376-b00100778935] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1341.348727] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80eeca31-2866-40f5-8eaf-c7ee887e6f15 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.352178] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fbed130-65a6-4c28-b3e2-a524d41e20af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.404187] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8612b0-50a5-4472-a833-593fcee3b916 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.406905] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1341.406905] env[63241]: value = "task-1819797" [ 1341.406905] env[63241]: _type = "Task" [ 1341.406905] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.416732] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5bf913-7846-42aa-99ac-d10e5459fe61 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.426448] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819797, 'name': CreateVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.438722] env[63241]: DEBUG nova.compute.provider_tree [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.575845] env[63241]: DEBUG nova.network.neutron [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1341.604891] env[63241]: DEBUG nova.compute.manager [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1341.723883] env[63241]: DEBUG nova.network.neutron [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Successfully created port: c55c077c-917c-45af-a4ea-c51ad8696209 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1341.737330] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819796, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071787} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.742539] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1341.743421] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e707012-5dd3-4e4a-9316-7f2d7dc0bf69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.776550] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] bbb94f08-7df2-457e-bc5b-d0008839cf20/bbb94f08-7df2-457e-bc5b-d0008839cf20.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1341.781179] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88ab6b28-56ad-4d2d-b414-b68e001510b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.806442] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1341.806442] env[63241]: value = "task-1819799" [ 1341.806442] env[63241]: _type = "Task" [ 1341.806442] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.816595] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819799, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.922579] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819797, 'name': CreateVM_Task, 'duration_secs': 0.357768} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.923899] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4182e53-50db-4256-b376-b00100778935] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1341.923899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.923899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.923899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1341.924101] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a4a42b3-9ace-41e1-abd0-762039107484 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.930531] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1341.930531] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a61aef-dbf9-b64d-5ad8-964f78c5d658" [ 1341.930531] env[63241]: _type = "Task" [ 1341.930531] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.944338] env[63241]: DEBUG nova.scheduler.client.report [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1341.949738] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a61aef-dbf9-b64d-5ad8-964f78c5d658, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.097264] env[63241]: DEBUG nova.compute.manager [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1342.138635] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1342.138635] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1342.138635] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1342.138794] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1342.138910] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1342.139178] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1342.139419] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1342.139610] 
env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1342.139782] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1342.139937] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1342.140117] env[63241]: DEBUG nova.virt.hardware [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1342.142850] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19311203-e617-4646-9f26-155031368ba8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.145223] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.158021] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c36015-83e5-4498-bd27-5ceaca84badd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.321700] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819799, 'name': ReconfigVM_Task, 'duration_secs': 0.318062} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.322534] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Reconfigured VM instance instance-00000003 to attach disk [datastore1] bbb94f08-7df2-457e-bc5b-d0008839cf20/bbb94f08-7df2-457e-bc5b-d0008839cf20.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1342.327041] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-601f6bfa-ef9a-4a5c-baa5-faadbc113cb1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.336016] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1342.336016] env[63241]: value = "task-1819800" [ 1342.336016] env[63241]: _type = "Task" [ 1342.336016] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.347446] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819800, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.351715] env[63241]: DEBUG nova.network.neutron [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Updating instance_info_cache with network_info: [{"id": "7168ddd9-eca4-4ba1-a734-ef4f493aa646", "address": "fa:16:3e:87:ec:da", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7168ddd9-ec", "ovs_interfaceid": "7168ddd9-eca4-4ba1-a734-ef4f493aa646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.426698] env[63241]: DEBUG nova.network.neutron [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Successfully updated 
port: 39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1342.447765] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a61aef-dbf9-b64d-5ad8-964f78c5d658, 'name': SearchDatastore_Task, 'duration_secs': 0.013885} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.447765] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.447765] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1342.447765] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.447898] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.447898] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1342.448407] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b729c2f-98fa-434a-b66c-1e3a0ef67208 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.455083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.455724] env[63241]: DEBUG nova.compute.manager [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: 
fbbb7682-873d-4bb0-8d39-4aec3566b0af] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1342.459187] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.439s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.462942] env[63241]: INFO nova.compute.claims [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1342.473235] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1342.473479] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1342.474516] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-078e65d8-9de3-4541-99b0-09d6660b22a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.481762] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1342.481762] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52dd90eb-b41b-0a5f-c83c-bfac6f32a399" [ 1342.481762] env[63241]: _type = "Task" [ 1342.481762] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.497140] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dd90eb-b41b-0a5f-c83c-bfac6f32a399, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.847187] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819800, 'name': Rename_Task, 'duration_secs': 0.181531} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.847506] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1342.847788] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a28dedd2-448e-48c7-ac86-a6b5018aa986 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.855325] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Releasing lock "refresh_cache-0440c0a8-f065-4a82-b190-33279e7c0d93" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.855715] env[63241]: DEBUG nova.compute.manager [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Instance network_info: |[{"id": "7168ddd9-eca4-4ba1-a734-ef4f493aa646", "address": "fa:16:3e:87:ec:da", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7168ddd9-ec", "ovs_interfaceid": "7168ddd9-eca4-4ba1-a734-ef4f493aa646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1342.857580] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:ec:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7168ddd9-eca4-4ba1-a734-ef4f493aa646', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1342.867562] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Creating folder: Project 
(a98c5e44c88f4470a83a16d1ba190a47). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1342.867928] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1342.867928] env[63241]: value = "task-1819801" [ 1342.867928] env[63241]: _type = "Task" [ 1342.867928] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.868131] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86d92922-ffe5-41ae-85a7-57ed8327e28c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.882827] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819801, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.886483] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Created folder: Project (a98c5e44c88f4470a83a16d1ba190a47) in parent group-v376927. [ 1342.887031] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Creating folder: Instances. Parent ref: group-v376942. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1342.887181] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d127d86-b343-42ea-836b-e9fb4cecce74 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.902065] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Created folder: Instances in parent group-v376942. [ 1342.902581] env[63241]: DEBUG oslo.service.loopingcall [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1342.902879] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1342.903298] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7291260-1c5f-4fdf-9bfb-9c3a6aac1c3e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.932629] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquiring lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.932807] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquired lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.934932] env[63241]: DEBUG nova.network.neutron [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1342.936017] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1342.936017] env[63241]: value = "task-1819804" [ 1342.936017] env[63241]: _type = "Task" [ 1342.936017] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.945497] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819804, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.969375] env[63241]: DEBUG nova.compute.utils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1342.975706] env[63241]: DEBUG nova.compute.manager [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1342.977286] env[63241]: DEBUG nova.network.neutron [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1343.000593] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dd90eb-b41b-0a5f-c83c-bfac6f32a399, 'name': SearchDatastore_Task, 'duration_secs': 0.010313} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.001475] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-719274be-01d3-4f49-a6b5-b758cf32210c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.009816] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1343.009816] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5216a2ce-056b-5f41-9e57-1ed0aaf4ae1d" [ 1343.009816] env[63241]: _type = "Task" [ 1343.009816] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.020138] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5216a2ce-056b-5f41-9e57-1ed0aaf4ae1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.105866] env[63241]: DEBUG nova.policy [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1328b5860e6490aa44a9bd45f747ddf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58d423eaebec4695928cb0d2b698bbfd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1343.193444] env[63241]: DEBUG nova.compute.manager [req-bddcaf1d-e8b9-4c99-9b20-3ba39da4f61f req-1806a1e3-7524-4359-8aec-f39630447e8d service nova] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Received event network-vif-plugged-7168ddd9-eca4-4ba1-a734-ef4f493aa646 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1343.193768] env[63241]: DEBUG oslo_concurrency.lockutils [req-bddcaf1d-e8b9-4c99-9b20-3ba39da4f61f req-1806a1e3-7524-4359-8aec-f39630447e8d service nova] Acquiring lock "0440c0a8-f065-4a82-b190-33279e7c0d93-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.194108] env[63241]: DEBUG oslo_concurrency.lockutils [req-bddcaf1d-e8b9-4c99-9b20-3ba39da4f61f req-1806a1e3-7524-4359-8aec-f39630447e8d service nova] Lock "0440c0a8-f065-4a82-b190-33279e7c0d93-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.194318] env[63241]: DEBUG oslo_concurrency.lockutils [req-bddcaf1d-e8b9-4c99-9b20-3ba39da4f61f req-1806a1e3-7524-4359-8aec-f39630447e8d service nova] Lock "0440c0a8-f065-4a82-b190-33279e7c0d93-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.194492] env[63241]: DEBUG nova.compute.manager [req-bddcaf1d-e8b9-4c99-9b20-3ba39da4f61f req-1806a1e3-7524-4359-8aec-f39630447e8d service nova] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] No waiting events found dispatching network-vif-plugged-7168ddd9-eca4-4ba1-a734-ef4f493aa646 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1343.194663] env[63241]: WARNING nova.compute.manager [req-bddcaf1d-e8b9-4c99-9b20-3ba39da4f61f req-1806a1e3-7524-4359-8aec-f39630447e8d service nova] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Received unexpected event network-vif-plugged-7168ddd9-eca4-4ba1-a734-ef4f493aa646 for instance with vm_state building and task_state spawning. [ 1343.381675] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819801, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.449173] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819804, 'name': CreateVM_Task, 'duration_secs': 0.438683} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.449173] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1343.450704] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.450704] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.450899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1343.451244] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-880c31fa-4d0c-42ff-9372-38cc09716c7d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.456920] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1343.456920] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525eab31-e844-65dc-ea87-abdd8c4d1589" [ 1343.456920] env[63241]: _type = "Task" [ 1343.456920] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.466570] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525eab31-e844-65dc-ea87-abdd8c4d1589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.474470] env[63241]: DEBUG nova.compute.manager [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1343.535465] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5216a2ce-056b-5f41-9e57-1ed0aaf4ae1d, 'name': SearchDatastore_Task, 'duration_secs': 0.010504} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.540866] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.541180] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1343.541843] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11e17320-2e8c-48fc-b175-f24236a289d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.551056] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1343.551056] env[63241]: value = "task-1819805" [ 1343.551056] env[63241]: _type = "Task" [ 1343.551056] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.564389] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819805, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.567048] env[63241]: DEBUG nova.compute.manager [req-a7aef02c-6f5a-4b97-ab1b-d19c3365cadf req-724a7267-dab0-4e5e-9088-962a414a43d6 service nova] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Received event network-changed-2295e83d-9394-4f35-be55-49b2eb1f271b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1343.567048] env[63241]: DEBUG nova.compute.manager [req-a7aef02c-6f5a-4b97-ab1b-d19c3365cadf req-724a7267-dab0-4e5e-9088-962a414a43d6 service nova] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Refreshing instance network info cache due to event network-changed-2295e83d-9394-4f35-be55-49b2eb1f271b. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1343.567048] env[63241]: DEBUG oslo_concurrency.lockutils [req-a7aef02c-6f5a-4b97-ab1b-d19c3365cadf req-724a7267-dab0-4e5e-9088-962a414a43d6 service nova] Acquiring lock "refresh_cache-bbb94f08-7df2-457e-bc5b-d0008839cf20" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.567268] env[63241]: DEBUG oslo_concurrency.lockutils [req-a7aef02c-6f5a-4b97-ab1b-d19c3365cadf req-724a7267-dab0-4e5e-9088-962a414a43d6 service nova] Acquired lock "refresh_cache-bbb94f08-7df2-457e-bc5b-d0008839cf20" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.568070] env[63241]: DEBUG nova.network.neutron [req-a7aef02c-6f5a-4b97-ab1b-d19c3365cadf req-724a7267-dab0-4e5e-9088-962a414a43d6 service nova] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Refreshing network info cache for port 2295e83d-9394-4f35-be55-49b2eb1f271b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1343.699039] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7711f3a8-17f9-4f70-8167-cf7e598a6620 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.711259] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e167f105-fa31-49c2-a0e1-8e420db69e16 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.756646] env[63241]: DEBUG nova.network.neutron [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1343.759256] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88310ff1-a841-4914-a77b-c1cf521f1f69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.769476] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e116502e-3f56-42cb-b9f2-2a894cf5e92e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.786880] env[63241]: DEBUG nova.compute.provider_tree [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1343.882587] env[63241]: DEBUG oslo_vmware.api [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1819801, 'name': PowerOnVM_Task, 'duration_secs': 0.553041} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.882587] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1343.882867] env[63241]: INFO nova.compute.manager [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Took 13.07 seconds to spawn the instance on the hypervisor. [ 1343.882867] env[63241]: DEBUG nova.compute.manager [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1343.883868] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08866243-df12-4761-a271-c38e5826d75e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.976407] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525eab31-e844-65dc-ea87-abdd8c4d1589, 'name': SearchDatastore_Task, 'duration_secs': 0.009147} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.977155] env[63241]: DEBUG nova.network.neutron [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Successfully updated port: bc8209bb-1cd1-4efc-806c-3fb04ffc73c5 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1343.979404] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.979909] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1343.979909] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1343.980018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.983700] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1343.988960] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd647acf-a5fe-4b8e-9899-8e1f5c8ccaa5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.000096] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1344.000096] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1344.003439] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faca08bd-5acb-4566-89c3-93a2c886f3bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.012024] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1344.012024] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522670aa-cc0d-4f0e-1304-62f1c85fc161" [ 1344.012024] env[63241]: _type = "Task" [ 1344.012024] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.022713] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522670aa-cc0d-4f0e-1304-62f1c85fc161, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.063519] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819805, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449187} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.063777] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1344.063975] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1344.064233] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c705179-f26c-4379-87e0-44776f0f5296 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.075926] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1344.075926] env[63241]: value = "task-1819806" [ 1344.075926] env[63241]: _type = "Task" [ 1344.075926] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.090640] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819806, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.257336] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "99eccbef-0e76-4532-af2f-5d74e563e1d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.257602] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.291060] env[63241]: DEBUG nova.scheduler.client.report [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1344.300264] env[63241]: DEBUG nova.network.neutron [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Updating instance_info_cache with network_info: [{"id": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "address": "fa:16:3e:02:28:3a", "network": {"id": "66538b1c-dfa7-4a9a-84ed-9775e692d300", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1045273516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06bbbe738ef34806971a4883b7bb3cc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39b9ee92-fa", "ovs_interfaceid": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.352812] env[63241]: DEBUG nova.network.neutron [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Successfully created port: b508f7cc-ab52-4957-9c5e-500a7f0b2335 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1344.404802] env[63241]: INFO nova.compute.manager [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Took 19.95 seconds to build instance. [ 1344.485208] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "refresh_cache-a1a8342a-b00e-42c1-8c01-a95659a78caf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.485350] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "refresh_cache-a1a8342a-b00e-42c1-8c01-a95659a78caf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.485501] env[63241]: DEBUG nova.network.neutron [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1344.493848] env[63241]: DEBUG nova.compute.manager [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1344.519630] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1344.519934] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1344.520130] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1344.520323] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1344.520469] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1344.520614] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1344.520840] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1344.520973] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1344.521239] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1344.521434] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1344.521603] env[63241]: DEBUG nova.virt.hardware [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1344.522952] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f77ab3a-d81d-4bdc-ae0a-f7d2401b9e36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.536440] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a9e189-3a71-4865-a115-0780d123191d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.541163] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522670aa-cc0d-4f0e-1304-62f1c85fc161, 'name': SearchDatastore_Task, 'duration_secs': 0.017158} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.543427] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21855a19-1a58-4498-8ad9-0eb2f342adaa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.558719] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1344.558719] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522ba82f-c877-2cae-03a4-fee2031d76d3" [ 1344.558719] env[63241]: _type = "Task" [ 1344.558719] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.568779] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522ba82f-c877-2cae-03a4-fee2031d76d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.589680] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819806, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071695} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.589680] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1344.589680] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1d6092-95d8-41ea-bc39-84cc0936b364 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.612131] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1344.612464] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-703e57cc-86b4-4e0c-ba3d-9205708179f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.633494] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1344.633494] env[63241]: value = "task-1819807" [ 1344.633494] env[63241]: _type = "Task" [ 1344.633494] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.643857] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819807, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.760928] env[63241]: DEBUG nova.compute.manager [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1344.797782] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.798417] env[63241]: DEBUG nova.compute.manager [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1344.801716] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.551s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.803522] env[63241]: INFO nova.compute.claims [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1344.807101] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Releasing lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.807464] env[63241]: DEBUG nova.compute.manager [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Instance network_info: |[{"id": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "address": "fa:16:3e:02:28:3a", "network": {"id": "66538b1c-dfa7-4a9a-84ed-9775e692d300", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1045273516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06bbbe738ef34806971a4883b7bb3cc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39b9ee92-fa", "ovs_interfaceid": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1344.808269] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:28:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9ec24851-7bb6-426b-b28f-f7b246df1713', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39b9ee92-fa8c-4018-be8f-6ad78d44a1a8', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1344.816971] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Creating folder: Project (06bbbe738ef34806971a4883b7bb3cc4). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1344.817387] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-021313c9-45a2-4773-9ce7-134e6245e7ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.834712] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Created folder: Project (06bbbe738ef34806971a4883b7bb3cc4) in parent group-v376927. [ 1344.835150] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Creating folder: Instances. Parent ref: group-v376945. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1344.835239] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49b44828-d014-48e0-80f5-ffa4e67fdbb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.851641] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Created folder: Instances in parent group-v376945. [ 1344.851641] env[63241]: DEBUG oslo.service.loopingcall [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.852279] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1344.852507] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f2b785d-5be8-4382-9466-e73a1635f91e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.875280] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.875280] env[63241]: value = "task-1819810" [ 1344.875280] env[63241]: _type = "Task" [ 1344.875280] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.885750] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819810, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.908261] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2a8cb0a-d03f-4e79-897c-0392ff9c34b3 tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "bbb94f08-7df2-457e-bc5b-d0008839cf20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.465s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.046422] env[63241]: DEBUG nova.network.neutron [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1345.075685] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522ba82f-c877-2cae-03a4-fee2031d76d3, 'name': SearchDatastore_Task, 'duration_secs': 0.040844} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.078983] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.079428] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0440c0a8-f065-4a82-b190-33279e7c0d93/0440c0a8-f065-4a82-b190-33279e7c0d93.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1345.079782] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2071ed1-07dd-4f03-958f-6bee37f4536b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.090453] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1345.090453] env[63241]: value = "task-1819811" [ 1345.090453] env[63241]: _type = "Task" [ 1345.090453] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.103725] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819811, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.145850] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819807, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.251034] env[63241]: DEBUG nova.network.neutron [req-a7aef02c-6f5a-4b97-ab1b-d19c3365cadf req-724a7267-dab0-4e5e-9088-962a414a43d6 service nova] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Updated VIF entry in instance network info cache for port 2295e83d-9394-4f35-be55-49b2eb1f271b. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1345.251405] env[63241]: DEBUG nova.network.neutron [req-a7aef02c-6f5a-4b97-ab1b-d19c3365cadf req-724a7267-dab0-4e5e-9088-962a414a43d6 service nova] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Updating instance_info_cache with network_info: [{"id": "2295e83d-9394-4f35-be55-49b2eb1f271b", "address": "fa:16:3e:b5:4c:33", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2295e83d-93", "ovs_interfaceid": "2295e83d-9394-4f35-be55-49b2eb1f271b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.291076] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.312769] env[63241]: DEBUG nova.compute.utils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1345.323774] env[63241]: DEBUG nova.compute.manager [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1345.324050] env[63241]: DEBUG nova.network.neutron [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1345.364653] env[63241]: DEBUG nova.network.neutron [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Updating instance_info_cache with network_info: [{"id": "bc8209bb-1cd1-4efc-806c-3fb04ffc73c5", "address": "fa:16:3e:02:50:ff", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8209bb-1c", "ovs_interfaceid": "bc8209bb-1cd1-4efc-806c-3fb04ffc73c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.389957] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819810, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.529787] env[63241]: DEBUG nova.policy [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5010fa4fbe3a496a9f354606c7a2a09c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69f026988ee840638c4d1163b629d7cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1345.605887] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819811, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.651196] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819807, 'name': ReconfigVM_Task, 'duration_secs': 0.804176} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.651624] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Reconfigured VM instance instance-00000002 to attach disk [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1345.652646] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b062bb1-cd0c-47d2-bd28-3a1fc49b72aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.659829] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1345.659829] env[63241]: value = "task-1819812" [ 1345.659829] env[63241]: _type = "Task" [ 1345.659829] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.688784] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819812, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.757981] env[63241]: DEBUG oslo_concurrency.lockutils [req-a7aef02c-6f5a-4b97-ab1b-d19c3365cadf req-724a7267-dab0-4e5e-9088-962a414a43d6 service nova] Releasing lock "refresh_cache-bbb94f08-7df2-457e-bc5b-d0008839cf20" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.824939] env[63241]: DEBUG nova.compute.manager [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1345.867315] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "refresh_cache-a1a8342a-b00e-42c1-8c01-a95659a78caf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.875225] env[63241]: DEBUG nova.compute.manager [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Instance network_info: |[{"id": "bc8209bb-1cd1-4efc-806c-3fb04ffc73c5", "address": "fa:16:3e:02:50:ff", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8209bb-1c", "ovs_interfaceid": "bc8209bb-1cd1-4efc-806c-3fb04ffc73c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1345.875450] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:50:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '329d0e4b-4190-484a-8560-9356dc31beca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc8209bb-1cd1-4efc-806c-3fb04ffc73c5', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1345.880957] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Creating folder: Project (38c709b68d2a40049d6d4795267987d8). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.887222] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d947368-2c86-4592-8637-c1fbd8840210 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.910537] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819810, 'name': CreateVM_Task, 'duration_secs': 0.618561} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.910814] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1345.911871] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.912123] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.912452] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1345.912710] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c273eba5-001f-42f2-b233-4a28d79b53ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.921770] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1345.921770] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5275db39-1c33-3b41-9390-df300b573456" [ 1345.921770] env[63241]: _type = "Task" [ 1345.921770] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.932984] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Created folder: Project (38c709b68d2a40049d6d4795267987d8) in parent group-v376927. [ 1345.932984] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Creating folder: Instances. Parent ref: group-v376948. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.936120] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da8ce98c-694d-416b-a59d-0a8b9299eb51 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.944155] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5275db39-1c33-3b41-9390-df300b573456, 'name': SearchDatastore_Task, 'duration_secs': 0.017603} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.945065] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.945400] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1345.945687] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.945835] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.946098] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1345.946514] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0fd9f5ff-27d8-423c-9426-9bff3ddc4e84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.954829] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Created folder: Instances in parent group-v376948. 
[ 1345.956023] env[63241]: DEBUG oslo.service.loopingcall [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.956023] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1345.956023] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6bfca21d-a40a-4867-a32d-f9357558f1d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.979990] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1345.980648] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1345.982478] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4429320c-bff3-4902-9cd4-cf3c684f7c86 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.988705] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1345.988705] env[63241]: value = "task-1819815" [ 1345.988705] env[63241]: _type = "Task" [ 1345.988705] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.995462] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1345.995462] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52edb43d-750e-b4e0-21ac-44e22ebf2d1a" [ 1345.995462] env[63241]: _type = "Task" [ 1345.995462] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.008626] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819815, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.017663] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52edb43d-750e-b4e0-21ac-44e22ebf2d1a, 'name': SearchDatastore_Task, 'duration_secs': 0.014136} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.018576] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e45ef872-cb4a-405c-ab76-395c3f7625a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.030319] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1346.030319] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52aab54d-7831-ca85-df36-74a5de3212f3" [ 1346.030319] env[63241]: _type = "Task" [ 1346.030319] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.041049] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52aab54d-7831-ca85-df36-74a5de3212f3, 'name': SearchDatastore_Task, 'duration_secs': 0.011195} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.041331] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.044029] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 780f3eee-f6c7-4054-8e6e-a370f74dc405/780f3eee-f6c7-4054-8e6e-a370f74dc405.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.044029] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4258fc5-b612-4f6e-992c-eba5cd12b966 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.050513] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1346.050513] env[63241]: value = "task-1819816" [ 1346.050513] env[63241]: _type = "Task" [ 1346.050513] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.063136] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819816, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.106695] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819811, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554351} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.106903] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0440c0a8-f065-4a82-b190-33279e7c0d93/0440c0a8-f065-4a82-b190-33279e7c0d93.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1346.106995] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1346.108471] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58bcd132-3bdd-4493-b923-1503762f0f01 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.129693] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1346.129693] env[63241]: value = "task-1819817" [ 1346.129693] env[63241]: _type = "Task" [ 1346.129693] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.141988] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819817, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.178707] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819812, 'name': Rename_Task, 'duration_secs': 0.151884} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.182029] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1346.182758] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-553752bb-e9b3-49b3-910c-6fa1974d8aac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.190911] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1346.190911] env[63241]: value = "task-1819818" [ 1346.190911] env[63241]: _type = "Task" [ 1346.190911] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.198570] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e2d547-de84-44eb-9509-0707da39ef18 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.206462] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819818, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.221256] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9923f22-2a37-4983-9d28-1e324c004a6f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.257364] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24222a79-9b57-4819-8ccb-711b37cb47b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.267832] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d9ee26-ba65-4515-a0ce-8c94e4069248 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.287867] env[63241]: DEBUG nova.compute.provider_tree [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.361682] env[63241]: DEBUG nova.network.neutron [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Successfully updated port: c55c077c-917c-45af-a4ea-c51ad8696209 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1346.504635] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819815, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.513893] env[63241]: DEBUG nova.network.neutron [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Successfully created port: 4388c54d-69af-4eb2-8f0b-e40773bf2e95 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1346.567180] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819816, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505224} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.567180] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 780f3eee-f6c7-4054-8e6e-a370f74dc405/780f3eee-f6c7-4054-8e6e-a370f74dc405.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1346.567180] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1346.567180] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b065d32-2ad9-4474-bd62-ba0e97e99045 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.576184] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1346.576184] env[63241]: value = "task-1819819" [ 1346.576184] env[63241]: _type = "Task" [ 1346.576184] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.590345] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819819, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.641154] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819817, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070971} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.641154] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1346.642094] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03f13e6-90f5-453d-a095-6e223bbcf15a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.670486] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 0440c0a8-f065-4a82-b190-33279e7c0d93/0440c0a8-f065-4a82-b190-33279e7c0d93.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1346.672090] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e879fcb9-f866-467d-97be-a00d839248fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.693881] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquiring lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.693881] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.701166] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1346.701166] env[63241]: value = "task-1819820" [ 1346.701166] env[63241]: _type = "Task" [ 1346.701166] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.705394] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819818, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.716297] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819820, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.792503] env[63241]: DEBUG nova.scheduler.client.report [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1346.843929] env[63241]: DEBUG nova.compute.manager [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1346.864492] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "refresh_cache-0e5447fd-a04f-4bc2-b329-e015883773b8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.864593] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "refresh_cache-0e5447fd-a04f-4bc2-b329-e015883773b8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.865398] env[63241]: DEBUG nova.network.neutron [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1346.870593] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1346.870853] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1346.871016] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1346.871215] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1346.871359] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1346.871547] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1346.871705] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1346.871857] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1346.872072] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1346.872260] env[63241]: DEBUG nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1346.872966] env[63241]: DEBUG 
nova.virt.hardware [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1346.873389] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d9a8e4-e710-4a42-8adc-401d4e90b678 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.885826] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501967f9-0c00-4db7-a165-cec3b4689cd4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.914491] env[63241]: DEBUG nova.network.neutron [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Successfully updated port: b508f7cc-ab52-4957-9c5e-500a7f0b2335 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1347.002870] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819815, 'name': CreateVM_Task, 'duration_secs': 0.530089} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.003090] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1347.003914] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.004322] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.004457] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1347.004780] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21a882d9-9a28-4019-a639-e40d91168d8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.011982] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1347.011982] env[63241]: value = 
"session[52622af6-969a-a161-ff87-4f4559b12465]52ea376d-7f75-017e-aa08-13dece479fcf" [ 1347.011982] env[63241]: _type = "Task" [ 1347.011982] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.022276] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ea376d-7f75-017e-aa08-13dece479fcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.080516] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquiring lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.080783] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.094109] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819819, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090959} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.094109] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1347.094109] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4d751e-78a5-4e57-a94e-ed508be49774 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.122798] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 780f3eee-f6c7-4054-8e6e-a370f74dc405/780f3eee-f6c7-4054-8e6e-a370f74dc405.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1347.122798] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18bd9f9d-bb8b-4010-813c-9051b83968ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.145323] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1347.145323] env[63241]: value = "task-1819821" [ 1347.145323] env[63241]: _type = "Task" [ 1347.145323] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.158343] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819821, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.194558] env[63241]: DEBUG nova.compute.manager [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1347.209867] env[63241]: DEBUG oslo_vmware.api [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819818, 'name': PowerOnVM_Task, 'duration_secs': 0.644148} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.213119] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1347.213336] env[63241]: DEBUG nova.compute.manager [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1347.214139] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94db28f-4e52-4f58-8b0a-1da4a1df8649 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.224349] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819820, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.299422] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.497s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.299518] env[63241]: DEBUG nova.compute.manager [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1347.302654] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.135s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.417724] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "refresh_cache-fbbb7682-873d-4bb0-8d39-4aec3566b0af" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.417907] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquired lock "refresh_cache-fbbb7682-873d-4bb0-8d39-4aec3566b0af" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.418079] env[63241]: DEBUG nova.network.neutron [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1347.431980] env[63241]: DEBUG nova.network.neutron [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1347.527625] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ea376d-7f75-017e-aa08-13dece479fcf, 'name': SearchDatastore_Task, 'duration_secs': 0.010713} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.528188] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.528467] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1347.528816] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.528928] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.529619] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1347.529947] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b857fba5-ecf4-4072-b561-f29242420ed1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.542405] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1347.542610] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1347.543403] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-573fd1f0-c63d-4495-a749-3650a24e4e9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.553320] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1347.553320] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b07e1d-0554-5d5c-7936-8cbf187f78cb" [ 1347.553320] env[63241]: _type = "Task" [ 1347.553320] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.563416] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b07e1d-0554-5d5c-7936-8cbf187f78cb, 'name': SearchDatastore_Task, 'duration_secs': 0.010049} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.564230] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a65a378-df81-436e-8816-b1378d700066 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.570810] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1347.570810] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528f2740-faec-5af0-3d9f-173a06984dd2" [ 1347.570810] env[63241]: _type = "Task" [ 1347.570810] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.581885] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528f2740-faec-5af0-3d9f-173a06984dd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.660805] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819821, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.724405] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819820, 'name': ReconfigVM_Task, 'duration_secs': 0.94809} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.725802] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.726980] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 0440c0a8-f065-4a82-b190-33279e7c0d93/0440c0a8-f065-4a82-b190-33279e7c0d93.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1347.727099] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e4553b84-d488-4a92-9057-781b4c76c2ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.737566] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1347.737566] env[63241]: value = "task-1819822" [ 1347.737566] env[63241]: _type = "Task" [ 1347.737566] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.747253] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.761134] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819822, 'name': Rename_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.814218] env[63241]: DEBUG nova.compute.utils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1347.815647] env[63241]: DEBUG nova.compute.manager [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1347.815871] env[63241]: DEBUG nova.network.neutron [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1347.951621] env[63241]: DEBUG nova.policy [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efae520b7bdf459ab1bdc0a9692026bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b1a99e9ac8f4fc0bbd763a9d91321af', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1347.971390] env[63241]: DEBUG nova.network.neutron [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Updating instance_info_cache with network_info: [{"id": "c55c077c-917c-45af-a4ea-c51ad8696209", "address": "fa:16:3e:41:1d:ed", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc55c077c-91", "ovs_interfaceid": "c55c077c-917c-45af-a4ea-c51ad8696209", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.019652] env[63241]: DEBUG nova.network.neutron [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1348.083861] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528f2740-faec-5af0-3d9f-173a06984dd2, 'name': SearchDatastore_Task, 'duration_secs': 0.011159} completed successfully. 
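The nova.policy line above records a request whose credentials carry only the reader and member roles failing the network:attach_external_network check; attaching external networks is typically reserved for admins, so a plain project member failing this check is the expected, benign outcome during port allocation. A toy role-based check showing the same decision (the rule table is an assumption standing in for oslo.policy's real rule language, not Nova's policy defaults verbatim):

RULES = {"network:attach_external_network": {"admin"}}  # assumed admin-only default


def check(rule, credentials):
    required = RULES.get(rule, set())
    return bool(required & set(credentials.get("roles", [])))


creds = {"roles": ["reader", "member"]}
print(check("network:attach_external_network", creds))  # False, matching the DEBUG line above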
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.084147] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.084555] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1348.084694] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c06805f8-36b2-4403-99e9-2343d85e5bde {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.092514] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1348.092514] env[63241]: value = "task-1819823" [ 1348.092514] env[63241]: _type = "Task" [ 1348.092514] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.101909] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819823, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.156763] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819821, 'name': ReconfigVM_Task, 'duration_secs': 0.875417} completed successfully. 
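The directory creation, SearchDatastore checks and "Copying Virtual Disk [datastore1] devstack-image-cache_base/... to [datastore1] a1a8342a-.../..." lines above trace the driver's image cache: one cached copy of the Glance image is kept per datastore and cloned into each instance's folder. A plain-filesystem stand-in for that cache-then-copy flow; the paths and helper names are illustrative, not the vmwareapi driver's API.

import shutil
from pathlib import Path


def ensure_cached_image(cache_root: Path, image_id: str, fetch) -> Path:
    """Return the cached vmdk for image_id, downloading it on first use."""
    cached = cache_root / image_id / f"{image_id}.vmdk"
    if not cached.exists():
        cached.parent.mkdir(parents=True, exist_ok=True)  # "Creating directory ... devstack-image-cache_base"
        fetch(image_id, cached)                            # one download, reused by every later instance
    return cached


def clone_root_disk(cache_root: Path, image_id: str, instance_dir: Path, fetch) -> Path:
    """Copy the cached image into the instance's own root disk (the CopyVirtualDisk step)."""
    src = ensure_cached_image(cache_root, image_id, fetch)
    instance_dir.mkdir(parents=True, exist_ok=True)
    dst = instance_dir / f"{instance_dir.name}.vmdk"
    shutil.copyfile(src, dst)
    return dst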
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.163493] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 780f3eee-f6c7-4054-8e6e-a370f74dc405/780f3eee-f6c7-4054-8e6e-a370f74dc405.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1348.164264] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6ab7381-b060-4510-9d7c-ba48480a0656 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.174571] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1348.174571] env[63241]: value = "task-1819824" [ 1348.174571] env[63241]: _type = "Task" [ 1348.174571] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.188533] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819824, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.253480] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819822, 'name': Rename_Task, 'duration_secs': 0.274687} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.254356] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1348.254356] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d600e753-272c-4d0f-8f7b-fe0a91be48e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.263360] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1348.263360] env[63241]: value = "task-1819825" [ 1348.263360] env[63241]: _type = "Task" [ 1348.263360] env[63241]: } to complete. 
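Every instance in this log runs through the same fixed sequence of vCenter tasks, visible in the task names being polled: CreateVM, CopyVirtualDisk, ExtendVirtualDisk, ReconfigVM (disk attach), Rename, and finally PowerOnVM. A compact sketch of driving such a pipeline with any poll-style runner; the task list mirrors what is polled here, while the two callables are hypothetical placeholders.

PIPELINE = [
    "CreateVM_Task",           # create the bare VM from the flavor/VIF info
    "CopyVirtualDisk_Task",    # clone the cached image to <instance>/<instance>.vmdk
    "ExtendVirtualDisk_Task",  # grow the root disk to the flavor's root_gb
    "ReconfigVM_Task",         # attach the vmdk to the VM
    "Rename_Task",             # give the VM its final name
    "PowerOnVM_Task",          # boot it
]


def run_pipeline(start_task, wait_for_task):
    """start_task(name) kicks off a vCenter task; wait_for_task blocks until it finishes."""
    for name in PIPELINE:
        wait_for_task(start_task(name))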
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.274222] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquiring lock "eb506425-4ecc-44b7-afa4-0901fc60b04f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1348.274474] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "eb506425-4ecc-44b7-afa4-0901fc60b04f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.280542] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819825, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.325214] env[63241]: DEBUG nova.compute.manager [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1348.368358] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 69c73342-258a-4b00-ba1b-ffdd5f247890 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.368358] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance b4182e53-50db-4256-b376-b00100778935 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.368358] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance bbb94f08-7df2-457e-bc5b-d0008839cf20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.368358] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0440c0a8-f065-4a82-b190-33279e7c0d93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.368717] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 780f3eee-f6c7-4054-8e6e-a370f74dc405 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.368717] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance a1a8342a-b00e-42c1-8c01-a95659a78caf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.368717] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0e5447fd-a04f-4bc2-b329-e015883773b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.368717] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance fbbb7682-873d-4bb0-8d39-4aec3566b0af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.369020] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 97890eda-0c1d-4423-acd2-60d3097c6f8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.369020] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance a1f24cfe-88f0-4e73-9ade-2dcf907848a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
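The resource-tracker lines above account for ten actively managed instances, each holding {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} in placement, against the inventory reported earlier for this provider (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400). Back-of-the-envelope arithmetic for how those allocations compare to capacity, taking capacity as (total - reserved) * allocation_ratio; this is illustrative arithmetic only, not the placement service's code.

INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

PER_INSTANCE = {"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1}


def capacity(inventory):
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"] for rc, v in inventory.items()}


def fits(num_instances, inventory=INVENTORY, request=PER_INSTANCE):
    cap = capacity(inventory)
    return all(num_instances * request[rc] <= cap[rc] for rc in request)


print(capacity(INVENTORY))  # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
print(fits(10))             # True: the ten tracked instances fit comfortably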
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1348.436600] env[63241]: DEBUG nova.network.neutron [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Updating instance_info_cache with network_info: [{"id": "b508f7cc-ab52-4957-9c5e-500a7f0b2335", "address": "fa:16:3e:b9:16:53", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb508f7cc-ab", "ovs_interfaceid": "b508f7cc-ab52-4957-9c5e-500a7f0b2335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.475441] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "refresh_cache-0e5447fd-a04f-4bc2-b329-e015883773b8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.476999] env[63241]: DEBUG nova.compute.manager [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Instance network_info: |[{"id": "c55c077c-917c-45af-a4ea-c51ad8696209", "address": "fa:16:3e:41:1d:ed", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc55c077c-91", "ovs_interfaceid": "c55c077c-917c-45af-a4ea-c51ad8696209", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1348.477890] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:1d:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '329d0e4b-4190-484a-8560-9356dc31beca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c55c077c-917c-45af-a4ea-c51ad8696209', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1348.485980] env[63241]: DEBUG oslo.service.loopingcall [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1348.487146] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1348.487439] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f8e2940-2d35-46df-88d3-8fa7a88bf45f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.513031] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1348.513031] env[63241]: value = "task-1819826" [ 1348.513031] env[63241]: _type = "Task" [ 1348.513031] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.524854] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819826, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.618799] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474292} completed successfully. 
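The two dumps above show the same Neutron port in both of its forms: the network_info cache entry (id, MAC address, bridge, nsx-logical-switch-id) and the VIF info handed to the VMware driver (network_name, mac_address, OpaqueNetwork network_ref, iface_id, vif_model). A sketch of that mapping for an NSX-backed OVS port; the field names follow the log output, while the helper itself is illustrative rather than the driver's code.

def vif_info_from_network_info(vif):
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],            # "br-int"
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }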
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.619773] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1348.620473] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1348.620473] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-623c40f0-fcd6-4abc-9231-c596b78ad6e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.630343] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1348.630343] env[63241]: value = "task-1819827" [ 1348.630343] env[63241]: _type = "Task" [ 1348.630343] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.651368] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819827, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.687080] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819824, 'name': Rename_Task, 'duration_secs': 0.253631} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.688786] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1348.688786] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5a9b9cf-f39d-4452-b8f8-d84866f467f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.696245] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1348.696245] env[63241]: value = "task-1819828" [ 1348.696245] env[63241]: _type = "Task" [ 1348.696245] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.707542] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819828, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.782223] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819825, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.811419] env[63241]: DEBUG nova.network.neutron [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Successfully created port: 61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1348.872212] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 44508cc6-c576-4c30-8559-75118ceba02a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1348.940137] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Releasing lock "refresh_cache-fbbb7682-873d-4bb0-8d39-4aec3566b0af" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1348.940492] env[63241]: DEBUG nova.compute.manager [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Instance network_info: |[{"id": "b508f7cc-ab52-4957-9c5e-500a7f0b2335", "address": "fa:16:3e:b9:16:53", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb508f7cc-ab", "ovs_interfaceid": "b508f7cc-ab52-4957-9c5e-500a7f0b2335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1348.941784] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:16:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b508f7cc-ab52-4957-9c5e-500a7f0b2335', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1348.950168] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Creating folder: Project (58d423eaebec4695928cb0d2b698bbfd). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1348.950168] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e97b7403-6018-45ad-b23e-c29d761810e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.967234] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Created folder: Project (58d423eaebec4695928cb0d2b698bbfd) in parent group-v376927. [ 1348.967459] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Creating folder: Instances. Parent ref: group-v376952. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1348.967714] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2982f096-c297-4cb6-8c0d-3681400dab59 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.990301] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Created folder: Instances in parent group-v376952. [ 1348.990301] env[63241]: DEBUG oslo.service.loopingcall [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1348.990301] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1348.990527] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-277f489f-58df-485f-b0cf-aa0a3e017050 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.023312] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1349.023312] env[63241]: value = "task-1819831" [ 1349.023312] env[63241]: _type = "Task" [ 1349.023312] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.032102] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819826, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.040900] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819831, 'name': CreateVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.124939] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "0c72c98b-57f0-44e5-9159-490b27eac3a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.125354] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "0c72c98b-57f0-44e5-9159-490b27eac3a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.143593] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819827, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089793} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.143861] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1349.144835] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a6bb43-55ae-4833-be0b-bfc5ff40e380 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.170404] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1349.170554] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f34a313-923e-469a-806c-e173f0f58985 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.195501] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1349.195501] env[63241]: value = "task-1819832" [ 1349.195501] env[63241]: _type = "Task" [ 1349.195501] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.212174] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819832, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.215858] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819828, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.281470] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819825, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.341604] env[63241]: DEBUG nova.compute.manager [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1349.378019] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1349.378019] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1349.378019] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1349.378233] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1349.378233] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1349.378435] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1349.378801] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1349.379110] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1349.379398] env[63241]: DEBUG 
nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1349.382024] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1349.382024] env[63241]: DEBUG nova.virt.hardware [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1349.382024] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 99eccbef-0e76-4532-af2f-5d74e563e1d2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.382708] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae4887f-21c7-4ab7-9251-dffb90f1afa2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.392557] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dfc94d-b83c-434f-a74c-b553749b20f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.402368] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Acquiring lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.402368] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.528115] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819826, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.538495] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819831, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.624367] env[63241]: DEBUG nova.network.neutron [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Successfully updated port: 4388c54d-69af-4eb2-8f0b-e40773bf2e95 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1349.685076] env[63241]: DEBUG nova.compute.manager [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Received event network-changed-7168ddd9-eca4-4ba1-a734-ef4f493aa646 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1349.686262] env[63241]: DEBUG nova.compute.manager [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Refreshing instance network info cache due to event network-changed-7168ddd9-eca4-4ba1-a734-ef4f493aa646. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1349.686559] env[63241]: DEBUG oslo_concurrency.lockutils [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] Acquiring lock "refresh_cache-0440c0a8-f065-4a82-b190-33279e7c0d93" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.686709] env[63241]: DEBUG oslo_concurrency.lockutils [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] Acquired lock "refresh_cache-0440c0a8-f065-4a82-b190-33279e7c0d93" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.686875] env[63241]: DEBUG nova.network.neutron [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Refreshing network info cache for port 7168ddd9-eca4-4ba1-a734-ef4f493aa646 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1349.725860] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819828, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.725860] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819832, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.783699] env[63241]: DEBUG oslo_vmware.api [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819825, 'name': PowerOnVM_Task, 'duration_secs': 1.10187} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.783699] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1349.783699] env[63241]: INFO nova.compute.manager [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Took 14.68 seconds to spawn the instance on the hypervisor. [ 1349.783699] env[63241]: DEBUG nova.compute.manager [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1349.784119] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ec7a79-d3af-4cce-9b73-b26c9ab09ad9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.888584] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance a88ba00d-6644-4ecc-8603-a7d79ce8a4b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.892994] env[63241]: DEBUG nova.compute.manager [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Received event network-vif-plugged-39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1349.894316] env[63241]: DEBUG oslo_concurrency.lockutils [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] Acquiring lock "780f3eee-f6c7-4054-8e6e-a370f74dc405-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.894316] env[63241]: DEBUG oslo_concurrency.lockutils [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] Lock "780f3eee-f6c7-4054-8e6e-a370f74dc405-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.894316] env[63241]: DEBUG oslo_concurrency.lockutils [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] Lock "780f3eee-f6c7-4054-8e6e-a370f74dc405-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1349.894316] env[63241]: DEBUG nova.compute.manager [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] No waiting events found dispatching network-vif-plugged-39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1349.894316] env[63241]: WARNING nova.compute.manager [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Received unexpected event network-vif-plugged-39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 for instance with vm_state building and task_state spawning. [ 1349.894708] env[63241]: DEBUG nova.compute.manager [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Received event network-changed-39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1349.894708] env[63241]: DEBUG nova.compute.manager [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Refreshing instance network info cache due to event network-changed-39b9ee92-fa8c-4018-be8f-6ad78d44a1a8. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1349.894708] env[63241]: DEBUG oslo_concurrency.lockutils [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] Acquiring lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.894708] env[63241]: DEBUG oslo_concurrency.lockutils [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] Acquired lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.894708] env[63241]: DEBUG nova.network.neutron [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Refreshing network info cache for port 39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1350.028110] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819826, 'name': CreateVM_Task, 'duration_secs': 1.05493} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.028789] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1350.032314] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.032404] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.032711] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1350.033929] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8ff54fd-ea1b-45e6-8ae4-97f593f0d43e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.039819] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1350.039819] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e7fe46-7ae0-bea0-c690-c15fc2622d6f" [ 1350.039819] env[63241]: _type = "Task" [ 1350.039819] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.043968] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819831, 'name': CreateVM_Task, 'duration_secs': 0.605075} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.048752] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1350.048932] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.056267] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e7fe46-7ae0-bea0-c690-c15fc2622d6f, 'name': SearchDatastore_Task, 'duration_secs': 0.010598} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.056620] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.056976] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1350.057336] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.057436] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.057645] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1350.058063] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.058221] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1350.058494] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bdc578e-8fed-4b11-8aae-d2674caea286 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.060663] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4562eb51-732f-4364-b17b-1ed7df78806b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.066836] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1350.066836] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527514f2-960e-25cd-fbe4-6dacef5cbf4a" [ 1350.066836] env[63241]: _type = "Task" [ 1350.066836] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.073064] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1350.073064] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1350.075244] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9979a0c4-017a-4e25-a6ee-768a02dd79a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.083965] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527514f2-960e-25cd-fbe4-6dacef5cbf4a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.087940] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1350.087940] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bdc6fa-c9d3-3051-33ad-2d8b38e63791" [ 1350.087940] env[63241]: _type = "Task" [ 1350.087940] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.096858] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bdc6fa-c9d3-3051-33ad-2d8b38e63791, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.128853] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquiring lock "refresh_cache-97890eda-0c1d-4423-acd2-60d3097c6f8a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.128853] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquired lock "refresh_cache-97890eda-0c1d-4423-acd2-60d3097c6f8a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.129044] env[63241]: DEBUG nova.network.neutron [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1350.215292] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819832, 'name': ReconfigVM_Task, 'duration_secs': 0.530199} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.217490] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Reconfigured VM instance instance-00000006 to attach disk [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1350.217490] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d52fa8bc-43e0-41e8-877f-7ea1be2b512a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.228394] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819828, 'name': PowerOnVM_Task} progress is 86%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.235653] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1350.235653] env[63241]: value = "task-1819833" [ 1350.235653] env[63241]: _type = "Task" [ 1350.235653] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.247555] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819833, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.280753] env[63241]: DEBUG nova.compute.manager [None req-b41de547-0c5c-457e-9295-b7e91c542605 tempest-ServerDiagnosticsV248Test-797217895 tempest-ServerDiagnosticsV248Test-797217895-project-admin] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1350.282517] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49452ed-aff5-4287-a9be-d53ee12285f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.290290] env[63241]: INFO nova.compute.manager [None req-b41de547-0c5c-457e-9295-b7e91c542605 tempest-ServerDiagnosticsV248Test-797217895 tempest-ServerDiagnosticsV248Test-797217895-project-admin] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Retrieving diagnostics [ 1350.291216] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db41a27-a84a-40b6-a36d-165c26022eaf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.342610] env[63241]: INFO nova.compute.manager [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Took 24.98 seconds to build instance. 
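The entries above repeat one pattern over and over: a vSphere task is kicked off (CreateVM_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), then polled until it reports completion, with the elapsed time surfacing as duration_secs. The sketch below reproduces that polling loop in a minimal form; TaskInfo, get_task_info and the poll interval are illustrative stand-ins and not the actual oslo.vmware API.

```python
import logging
import time
from dataclasses import dataclass

LOG = logging.getLogger(__name__)


@dataclass
class TaskInfo:
    # Illustrative stand-in for a vSphere TaskInfo object.
    state: str            # "running", "success", or "error"
    progress: int         # 0-100, as seen in the "progress is N%" entries
    error: str | None = None


def wait_for_task(task_id: str, get_task_info, poll_interval: float = 0.5):
    """Poll a task until it finishes, logging progress like the entries above.

    `get_task_info` is a caller-supplied callable returning a TaskInfo; it is
    a placeholder for whatever client actually reports task state.
    """
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info.state == "running":
            LOG.debug("Task: %s progress is %d%%.", task_id, info.progress)
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        if info.state == "success":
            LOG.debug("Task: %s completed successfully (duration_secs %.6f).",
                      task_id, duration)
            return info
        raise RuntimeError(f"Task {task_id} failed: {info.error}")
```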
[ 1350.391535] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 3c51d4dc-5a2c-4483-9aa5-8bab532971d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1350.498199] env[63241]: DEBUG nova.network.neutron [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Updated VIF entry in instance network info cache for port 7168ddd9-eca4-4ba1-a734-ef4f493aa646. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1350.498518] env[63241]: DEBUG nova.network.neutron [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Updating instance_info_cache with network_info: [{"id": "7168ddd9-eca4-4ba1-a734-ef4f493aa646", "address": "fa:16:3e:87:ec:da", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7168ddd9-ec", "ovs_interfaceid": "7168ddd9-eca4-4ba1-a734-ef4f493aa646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.578980] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527514f2-960e-25cd-fbe4-6dacef5cbf4a, 'name': SearchDatastore_Task, 'duration_secs': 0.021592} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.579109] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.579324] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1350.579537] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.599508] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bdc6fa-c9d3-3051-33ad-2d8b38e63791, 'name': SearchDatastore_Task, 'duration_secs': 0.015474} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.600683] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eddd3fec-4571-4f8f-96e4-eccfd0f3186e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.606590] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1350.606590] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5284ac3f-d659-411d-e9b8-5f56090add1b" [ 1350.606590] env[63241]: _type = "Task" [ 1350.606590] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.615802] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5284ac3f-d659-411d-e9b8-5f56090add1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.633745] env[63241]: DEBUG nova.network.neutron [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Updated VIF entry in instance network info cache for port 39b9ee92-fa8c-4018-be8f-6ad78d44a1a8. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1350.634117] env[63241]: DEBUG nova.network.neutron [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Updating instance_info_cache with network_info: [{"id": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "address": "fa:16:3e:02:28:3a", "network": {"id": "66538b1c-dfa7-4a9a-84ed-9775e692d300", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1045273516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06bbbe738ef34806971a4883b7bb3cc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39b9ee92-fa", "ovs_interfaceid": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.662019] env[63241]: DEBUG nova.network.neutron [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1350.715975] env[63241]: DEBUG oslo_vmware.api [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1819828, 'name': PowerOnVM_Task, 'duration_secs': 1.582364} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.716281] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.716457] env[63241]: INFO nova.compute.manager [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Took 13.30 seconds to spawn the instance on the hypervisor. 
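Interleaved with the VMware calls, nearly every step is bracketed by lock bookkeeping from oslo_concurrency.lockutils: "Acquiring lock X by Y", "Lock X acquired ... waited N", "Lock X 'released' ... held N". The sketch below reproduces that waited/held accounting with a plain threading.Lock; logged_lock and its module-level registry are illustrative, not the oslo.concurrency implementation.

```python
import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()


@contextmanager
def logged_lock(name: str, owner: str):
    """Serialize access to `name`, logging waited/held times like the log above."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())

    LOG.debug('Acquiring lock "%s" by "%s"', name, owner)
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, owner, t1 - t0)
    try:
        yield
    finally:
        lock.release()
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                  name, owner, time.monotonic() - t1)


# Example use, mirroring the per-image cache lock seen around the
# SearchDatastore_Task calls (path shortened for illustration):
# with logged_lock("[datastore1] devstack-image-cache_base/<image-id>", "image-fetch"):
#     ...  # check the cache, copy the VMDK only if it is missing
```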
[ 1350.716635] env[63241]: DEBUG nova.compute.manager [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1350.717424] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f444a3f7-4e68-4f83-b8e4-9c0c57a2c3a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.746030] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819833, 'name': Rename_Task, 'duration_secs': 0.37695} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.746305] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1350.746564] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a1e3665-a909-4e7c-adf1-6f3f2528829e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.755496] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1350.755496] env[63241]: value = "task-1819834" [ 1350.755496] env[63241]: _type = "Task" [ 1350.755496] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.765757] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819834, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.844848] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb05d6a1-1303-4fe9-b982-09a59577b859 tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "0440c0a8-f065-4a82-b190-33279e7c0d93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.498s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.894750] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance eb506425-4ecc-44b7-afa4-0901fc60b04f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1350.896747] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1350.896747] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1350.933881] env[63241]: DEBUG nova.network.neutron [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Updating instance_info_cache with network_info: [{"id": "4388c54d-69af-4eb2-8f0b-e40773bf2e95", "address": "fa:16:3e:27:1f:ac", "network": {"id": "76566fa9-e905-4509-ba1e-02e3aa059cae", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1889079318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69f026988ee840638c4d1163b629d7cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4388c54d-69", "ovs_interfaceid": "4388c54d-69af-4eb2-8f0b-e40773bf2e95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.003980] env[63241]: DEBUG oslo_concurrency.lockutils [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] Releasing lock "refresh_cache-0440c0a8-f065-4a82-b190-33279e7c0d93" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.003980] env[63241]: DEBUG nova.compute.manager [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Received event network-vif-plugged-bc8209bb-1cd1-4efc-806c-3fb04ffc73c5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1351.003980] env[63241]: DEBUG oslo_concurrency.lockutils [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] Acquiring lock "a1a8342a-b00e-42c1-8c01-a95659a78caf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.003980] env[63241]: DEBUG oslo_concurrency.lockutils [req-62885564-7066-47e9-84cc-e494d1755371 
req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] Lock "a1a8342a-b00e-42c1-8c01-a95659a78caf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.003980] env[63241]: DEBUG oslo_concurrency.lockutils [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] Lock "a1a8342a-b00e-42c1-8c01-a95659a78caf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.004246] env[63241]: DEBUG nova.compute.manager [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] No waiting events found dispatching network-vif-plugged-bc8209bb-1cd1-4efc-806c-3fb04ffc73c5 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1351.004246] env[63241]: WARNING nova.compute.manager [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Received unexpected event network-vif-plugged-bc8209bb-1cd1-4efc-806c-3fb04ffc73c5 for instance with vm_state building and task_state spawning. [ 1351.004246] env[63241]: DEBUG nova.compute.manager [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Received event network-changed-bc8209bb-1cd1-4efc-806c-3fb04ffc73c5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1351.004246] env[63241]: DEBUG nova.compute.manager [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Refreshing instance network info cache due to event network-changed-bc8209bb-1cd1-4efc-806c-3fb04ffc73c5. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1351.004246] env[63241]: DEBUG oslo_concurrency.lockutils [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] Acquiring lock "refresh_cache-a1a8342a-b00e-42c1-8c01-a95659a78caf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.004398] env[63241]: DEBUG oslo_concurrency.lockutils [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] Acquired lock "refresh_cache-a1a8342a-b00e-42c1-8c01-a95659a78caf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.004398] env[63241]: DEBUG nova.network.neutron [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Refreshing network info cache for port bc8209bb-1cd1-4efc-806c-3fb04ffc73c5 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1351.036608] env[63241]: DEBUG oslo_concurrency.lockutils [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Acquiring lock "bbb94f08-7df2-457e-bc5b-d0008839cf20" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.036764] env[63241]: DEBUG oslo_concurrency.lockutils [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Lock "bbb94f08-7df2-457e-bc5b-d0008839cf20" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.036964] env[63241]: DEBUG oslo_concurrency.lockutils [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Acquiring lock "bbb94f08-7df2-457e-bc5b-d0008839cf20-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.037158] env[63241]: DEBUG oslo_concurrency.lockutils [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Lock "bbb94f08-7df2-457e-bc5b-d0008839cf20-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.037322] env[63241]: DEBUG oslo_concurrency.lockutils [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Lock "bbb94f08-7df2-457e-bc5b-d0008839cf20-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.039219] env[63241]: INFO nova.compute.manager [None req-360e5ce1-e829-47eb-b06c-8245b54df967 
tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Terminating instance [ 1351.043410] env[63241]: DEBUG nova.compute.manager [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1351.043733] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1351.044706] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6dedde-b341-4134-81b3-bded21ea07c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.055164] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1351.058303] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6760a8c9-fd85-496e-8d8e-6c64c098c26f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.066661] env[63241]: DEBUG oslo_vmware.api [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Waiting for the task: (returnval){ [ 1351.066661] env[63241]: value = "task-1819835" [ 1351.066661] env[63241]: _type = "Task" [ 1351.066661] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.078240] env[63241]: DEBUG oslo_vmware.api [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Task: {'id': task-1819835, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.120454] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5284ac3f-d659-411d-e9b8-5f56090add1b, 'name': SearchDatastore_Task, 'duration_secs': 0.019105} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.120998] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.121336] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0e5447fd-a04f-4bc2-b329-e015883773b8/0e5447fd-a04f-4bc2-b329-e015883773b8.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1351.121715] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.121951] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1351.122252] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1472166a-03a9-4dcd-b56a-b9a5f19e404a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.127831] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1059689c-8d9a-4854-a444-b41bce7dc97c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.139916] env[63241]: DEBUG oslo_concurrency.lockutils [req-3a419ced-57f3-4709-b255-1a4586a23307 req-5602207c-f872-450f-af5c-d4c575216323 service nova] Releasing lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.140508] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1351.140700] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1351.141544] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1351.141544] env[63241]: value = "task-1819836" [ 1351.141544] env[63241]: _type = "Task" [ 1351.141544] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.141840] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d59e3d3-6364-405f-9cb0-1c677372eff3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.154495] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1351.154495] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a2fe11-a9ac-d0c8-981c-e49e08d482f8" [ 1351.154495] env[63241]: _type = "Task" [ 1351.154495] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.160223] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819836, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.171197] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a2fe11-a9ac-d0c8-981c-e49e08d482f8, 'name': SearchDatastore_Task, 'duration_secs': 0.009914} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.172436] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-238ed931-1ed0-4342-8008-d9ecbd9a43b3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.180609] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1351.180609] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52804379-8fb4-8ba3-902f-230164647862" [ 1351.180609] env[63241]: _type = "Task" [ 1351.180609] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.194590] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52804379-8fb4-8ba3-902f-230164647862, 'name': SearchDatastore_Task, 'duration_secs': 0.010378} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.195900] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.195900] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] fbbb7682-873d-4bb0-8d39-4aec3566b0af/fbbb7682-873d-4bb0-8d39-4aec3566b0af.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1351.195900] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba3081f5-c65c-4d73-958b-4945d616466e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.208153] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1351.208153] env[63241]: value = "task-1819837" [ 1351.208153] env[63241]: _type = "Task" [ 1351.208153] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.214665] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819837, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.218020] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a16142-9cff-4f86-8f34-f988bcce9ea6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.226637] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcbee26-859f-43ed-ad71-2160a049f3e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.278565] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da4d85b-00d7-4afc-9325-0e10b1181534 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.281808] env[63241]: INFO nova.compute.manager [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Took 24.01 seconds to build instance. 
[ 1351.290394] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819834, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.296030] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdbca5c-d956-48a1-a081-ff2b63a404a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.310763] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1351.356558] env[63241]: DEBUG nova.compute.manager [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1351.437538] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Releasing lock "refresh_cache-97890eda-0c1d-4423-acd2-60d3097c6f8a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.437538] env[63241]: DEBUG nova.compute.manager [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Instance network_info: |[{"id": "4388c54d-69af-4eb2-8f0b-e40773bf2e95", "address": "fa:16:3e:27:1f:ac", "network": {"id": "76566fa9-e905-4509-ba1e-02e3aa059cae", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1889079318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69f026988ee840638c4d1163b629d7cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4388c54d-69", "ovs_interfaceid": "4388c54d-69af-4eb2-8f0b-e40773bf2e95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1351.438273] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:1f:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e27fd35-1d7b-4358-92d5-4d34da27b992', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4388c54d-69af-4eb2-8f0b-e40773bf2e95', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1351.446731] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Creating folder: Project (69f026988ee840638c4d1163b629d7cb). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1351.447264] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c71c0b4-cda5-4f00-a40c-9d8c68432ab5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.463439] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Created folder: Project (69f026988ee840638c4d1163b629d7cb) in parent group-v376927. [ 1351.463593] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Creating folder: Instances. Parent ref: group-v376955. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1351.465982] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8cb5347c-2736-442c-8409-f73d8c86e53a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.475010] env[63241]: DEBUG nova.network.neutron [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Successfully updated port: 61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1351.478595] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Created folder: Instances in parent group-v376955. [ 1351.478921] env[63241]: DEBUG oslo.service.loopingcall [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1351.479635] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1351.479793] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f28cce62-0751-49e0-97bb-c446434e1687 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.518705] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1351.518705] env[63241]: value = "task-1819840" [ 1351.518705] env[63241]: _type = "Task" [ 1351.518705] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.530066] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819840, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.580415] env[63241]: DEBUG oslo_vmware.api [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Task: {'id': task-1819835, 'name': PowerOffVM_Task, 'duration_secs': 0.235427} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.580731] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1351.580984] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1351.581266] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74c6f4a1-2703-4df3-b87f-13b23575a281 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.657561] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819836, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463602} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.658079] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0e5447fd-a04f-4bc2-b329-e015883773b8/0e5447fd-a04f-4bc2-b329-e015883773b8.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1351.658079] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1351.662291] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2509c729-18cd-45f0-82a8-ad81b63b7102 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.669042] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1351.669042] env[63241]: value = "task-1819842" [ 1351.669042] env[63241]: _type = "Task" [ 1351.669042] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.677267] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1351.677605] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1351.677895] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Deleting the datastore file [datastore1] bbb94f08-7df2-457e-bc5b-d0008839cf20 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1351.678278] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea374cd0-2545-4ccf-92b6-3192e4f3025d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.684361] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819842, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.691639] env[63241]: DEBUG oslo_vmware.api [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Waiting for the task: (returnval){ [ 1351.691639] env[63241]: value = "task-1819843" [ 1351.691639] env[63241]: _type = "Task" [ 1351.691639] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.701531] env[63241]: DEBUG oslo_vmware.api [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Task: {'id': task-1819843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.715748] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819837, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.789019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d724796d-8e0c-47fb-8f61-2416990de088 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "780f3eee-f6c7-4054-8e6e-a370f74dc405" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.537s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.792123] env[63241]: DEBUG oslo_vmware.api [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819834, 'name': PowerOnVM_Task, 'duration_secs': 0.641246} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.795115] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1351.795115] env[63241]: INFO nova.compute.manager [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Took 12.08 seconds to spawn the instance on the hypervisor. 
[ 1351.795115] env[63241]: DEBUG nova.compute.manager [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1351.795115] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92df7305-431d-48a2-83e3-7b49b8c24201 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.843035] env[63241]: ERROR nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [req-e6a255cd-b054-4935-9a59-23ce556e5903] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e6a255cd-b054-4935-9a59-23ce556e5903"}]} [ 1351.876102] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1351.886201] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.895588] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1351.895790] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1351.909398] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1351.941799] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1351.980291] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1351.980445] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquired lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.980596] env[63241]: DEBUG nova.network.neutron [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1351.991785] env[63241]: DEBUG nova.network.neutron [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Updated VIF entry in instance network info cache for port bc8209bb-1cd1-4efc-806c-3fb04ffc73c5. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1351.992022] env[63241]: DEBUG nova.network.neutron [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Updating instance_info_cache with network_info: [{"id": "bc8209bb-1cd1-4efc-806c-3fb04ffc73c5", "address": "fa:16:3e:02:50:ff", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8209bb-1c", "ovs_interfaceid": "bc8209bb-1cd1-4efc-806c-3fb04ffc73c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.031569] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819840, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.188538] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819842, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.253198} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.189373] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1352.190950] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7a5272-c1c9-4c5f-860a-fe2654ff346f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.234047] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e363c52-d7e2-4654-aa95-f4b7708c81c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.254116] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 0e5447fd-a04f-4bc2-b329-e015883773b8/0e5447fd-a04f-4bc2-b329-e015883773b8.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1352.262087] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8aee3654-10de-48fd-a520-05f6e309e244 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.283299] env[63241]: DEBUG oslo_vmware.api [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Task: {'id': task-1819843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39526} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.285794] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1352.286038] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1352.286251] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1352.286453] env[63241]: INFO nova.compute.manager [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1352.286792] env[63241]: DEBUG oslo.service.loopingcall [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1352.287691] env[63241]: DEBUG nova.compute.manager [-] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1352.287793] env[63241]: DEBUG nova.network.neutron [-] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1352.295590] env[63241]: DEBUG nova.compute.manager [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1352.297398] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819837, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70231} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.301599] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] fbbb7682-873d-4bb0-8d39-4aec3566b0af/fbbb7682-873d-4bb0-8d39-4aec3566b0af.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1352.301599] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1352.301599] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17af7a08-7631-4b39-a6e0-6322a1bfc64c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.303740] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1352.303740] env[63241]: value = "task-1819844" [ 1352.303740] env[63241]: _type = "Task" [ 1352.303740] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.303917] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c41b926-7f95-42fb-aa91-45d6ac1f84f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.347715] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3d17c5-cdf2-47df-9b60-6a8a5b5ad910 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.350834] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1352.350834] env[63241]: value = "task-1819845" [ 1352.350834] env[63241]: _type = "Task" [ 1352.350834] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.355377] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819844, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.355377] env[63241]: INFO nova.compute.manager [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Took 21.26 seconds to build instance. 
[ 1352.365889] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437208d7-c6f5-48ca-b78b-b2d30c5699f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.373420] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819845, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.386985] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.495439] env[63241]: DEBUG oslo_concurrency.lockutils [req-62885564-7066-47e9-84cc-e494d1755371 req-f6e1fa85-d458-409f-b7bd-60fc7c0cddbe service nova] Releasing lock "refresh_cache-a1a8342a-b00e-42c1-8c01-a95659a78caf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.531133] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819840, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.553778] env[63241]: DEBUG nova.network.neutron [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1352.650793] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "11b1888e-95ec-4166-9219-0c38f8817dd4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.650950] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "11b1888e-95ec-4166-9219-0c38f8817dd4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.819426] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819844, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.819426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.864144] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b7804bb-2097-47fc-aa2e-2f9fcac799ec tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "a1a8342a-b00e-42c1-8c01-a95659a78caf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.779s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.872313] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819845, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092918} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.872579] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1352.873458] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc66d44a-5ece-4828-a444-abb85a5dcfe2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.891607] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1352.903410] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] fbbb7682-873d-4bb0-8d39-4aec3566b0af/fbbb7682-873d-4bb0-8d39-4aec3566b0af.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1352.905423] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4bd360b-9106-4f72-8882-ba2312e8451f {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.928624] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "94a604da-ad3d-415a-aa92-d648e3da803d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1352.928624] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "94a604da-ad3d-415a-aa92-d648e3da803d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1352.937714] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1352.937714] env[63241]: value = "task-1819846" [ 1352.937714] env[63241]: _type = "Task" [ 1352.937714] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.951948] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819846, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.041283] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819840, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.070579] env[63241]: DEBUG nova.network.neutron [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Updating instance_info_cache with network_info: [{"id": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "address": "fa:16:3e:22:0e:78", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61bceac0-2e", "ovs_interfaceid": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.202729] env[63241]: INFO nova.compute.manager [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Rebuilding instance [ 1353.260155] env[63241]: DEBUG nova.compute.manager [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1353.261039] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6f7f06-9609-4eaf-874b-d63810dea7ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.321018] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819844, 'name': ReconfigVM_Task, 'duration_secs': 0.761216} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.322029] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 0e5447fd-a04f-4bc2-b329-e015883773b8/0e5447fd-a04f-4bc2-b329-e015883773b8.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1353.322161] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c654c680-4541-4e33-9419-7a7c78f0895b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.332602] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1353.332602] env[63241]: value = "task-1819847" [ 1353.332602] env[63241]: _type = "Task" [ 1353.332602] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.344586] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819847, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.366576] env[63241]: DEBUG nova.compute.manager [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1353.406902] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1353.406902] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.104s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.407097] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.262s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.409340] env[63241]: INFO nova.compute.claims [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1353.452983] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819846, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.535400] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819840, 'name': CreateVM_Task, 'duration_secs': 1.720397} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.535665] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1353.536415] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.536531] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.536849] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1353.537138] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22dc9bf5-7ad5-4e44-8915-0a3104be8b3a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.543198] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1353.543198] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a8c310-3c45-c0fa-8333-ea224b06f1b4" [ 1353.543198] env[63241]: _type = "Task" [ 1353.543198] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.546839] env[63241]: DEBUG nova.network.neutron [-] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.557061] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a8c310-3c45-c0fa-8333-ea224b06f1b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.573036] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Releasing lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.573464] env[63241]: DEBUG nova.compute.manager [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Instance network_info: |[{"id": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "address": "fa:16:3e:22:0e:78", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61bceac0-2e", "ovs_interfaceid": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1353.573856] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:0e:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c118a9ee-84f7-4f09-8a21-05600ed3cc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61bceac0-2e58-4bc3-92f6-c421aabdfc8b', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1353.582209] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Creating folder: Project (1b1a99e9ac8f4fc0bbd763a9d91321af). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1353.583861] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8446a842-343e-4d2c-a16f-7bb6c02f75b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.589512] env[63241]: DEBUG nova.compute.manager [None req-0162d53c-2961-400f-b613-2a227e367208 tempest-ServerExternalEventsTest-1325232468 tempest-ServerExternalEventsTest-1325232468-project] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Received event network-changed {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.589638] env[63241]: DEBUG nova.compute.manager [None req-0162d53c-2961-400f-b613-2a227e367208 tempest-ServerExternalEventsTest-1325232468 tempest-ServerExternalEventsTest-1325232468-project] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Refreshing instance network info cache due to event network-changed. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1353.589890] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0162d53c-2961-400f-b613-2a227e367208 tempest-ServerExternalEventsTest-1325232468 tempest-ServerExternalEventsTest-1325232468-project] Acquiring lock "refresh_cache-0440c0a8-f065-4a82-b190-33279e7c0d93" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.590052] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0162d53c-2961-400f-b613-2a227e367208 tempest-ServerExternalEventsTest-1325232468 tempest-ServerExternalEventsTest-1325232468-project] Acquired lock "refresh_cache-0440c0a8-f065-4a82-b190-33279e7c0d93" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.590216] env[63241]: DEBUG nova.network.neutron [None req-0162d53c-2961-400f-b613-2a227e367208 tempest-ServerExternalEventsTest-1325232468 tempest-ServerExternalEventsTest-1325232468-project] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.605313] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Created folder: Project (1b1a99e9ac8f4fc0bbd763a9d91321af) in parent group-v376927. [ 1353.605530] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Creating folder: Instances. Parent ref: group-v376958. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1353.605767] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58b555c0-9e23-45b1-acfd-ef14b4d7f8cc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.612422] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Received event network-vif-plugged-c55c077c-917c-45af-a4ea-c51ad8696209 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.612422] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Acquiring lock "0e5447fd-a04f-4bc2-b329-e015883773b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.612422] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Lock "0e5447fd-a04f-4bc2-b329-e015883773b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.612422] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Lock "0e5447fd-a04f-4bc2-b329-e015883773b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.612422] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] No waiting events found dispatching network-vif-plugged-c55c077c-917c-45af-a4ea-c51ad8696209 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1353.612760] env[63241]: WARNING nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Received unexpected event network-vif-plugged-c55c077c-917c-45af-a4ea-c51ad8696209 for instance with vm_state building and task_state spawning. [ 1353.612760] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Received event network-changed-c55c077c-917c-45af-a4ea-c51ad8696209 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.612760] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Refreshing instance network info cache due to event network-changed-c55c077c-917c-45af-a4ea-c51ad8696209. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1353.612760] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Acquiring lock "refresh_cache-0e5447fd-a04f-4bc2-b329-e015883773b8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.612760] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Acquired lock "refresh_cache-0e5447fd-a04f-4bc2-b329-e015883773b8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.613270] env[63241]: DEBUG nova.network.neutron [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Refreshing network info cache for port c55c077c-917c-45af-a4ea-c51ad8696209 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1353.625602] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Created folder: Instances in parent group-v376958. [ 1353.626013] env[63241]: DEBUG oslo.service.loopingcall [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1353.626340] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1353.626670] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78311374-1433-4610-a287-c40d9204c715 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.651111] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1353.651111] env[63241]: value = "task-1819850" [ 1353.651111] env[63241]: _type = "Task" [ 1353.651111] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.661640] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819850, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.777140] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1353.778233] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-967fe7f1-9700-459c-b84f-da5cae8ba00c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.780413] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquiring lock "69c73342-258a-4b00-ba1b-ffdd5f247890" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.780706] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "69c73342-258a-4b00-ba1b-ffdd5f247890" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.780959] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquiring lock "69c73342-258a-4b00-ba1b-ffdd5f247890-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.781233] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "69c73342-258a-4b00-ba1b-ffdd5f247890-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.781459] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "69c73342-258a-4b00-ba1b-ffdd5f247890-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.784106] env[63241]: INFO nova.compute.manager [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Terminating instance [ 1353.786337] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] 
Acquiring lock "refresh_cache-69c73342-258a-4b00-ba1b-ffdd5f247890" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.786482] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquired lock "refresh_cache-69c73342-258a-4b00-ba1b-ffdd5f247890" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.786688] env[63241]: DEBUG nova.network.neutron [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.790799] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1353.790799] env[63241]: value = "task-1819851" [ 1353.790799] env[63241]: _type = "Task" [ 1353.790799] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.802988] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819851, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.843401] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819847, 'name': Rename_Task, 'duration_secs': 0.332456} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.843707] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1353.843956] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48bedbd7-74af-48fe-909e-c04bf662f710 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.847874] env[63241]: DEBUG nova.compute.manager [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Received event network-vif-plugged-b508f7cc-ab52-4957-9c5e-500a7f0b2335 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.848095] env[63241]: DEBUG oslo_concurrency.lockutils [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] Acquiring lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.848302] env[63241]: DEBUG oslo_concurrency.lockutils [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] Lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.848468] env[63241]: DEBUG oslo_concurrency.lockutils [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] Lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.848670] env[63241]: DEBUG nova.compute.manager [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] No waiting events found dispatching network-vif-plugged-b508f7cc-ab52-4957-9c5e-500a7f0b2335 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1353.848818] env[63241]: WARNING nova.compute.manager [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Received unexpected event network-vif-plugged-b508f7cc-ab52-4957-9c5e-500a7f0b2335 for instance with vm_state building and task_state spawning. 
[ 1353.849029] env[63241]: DEBUG nova.compute.manager [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Received event network-changed-b508f7cc-ab52-4957-9c5e-500a7f0b2335 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.849205] env[63241]: DEBUG nova.compute.manager [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Refreshing instance network info cache due to event network-changed-b508f7cc-ab52-4957-9c5e-500a7f0b2335. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1353.849427] env[63241]: DEBUG oslo_concurrency.lockutils [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] Acquiring lock "refresh_cache-fbbb7682-873d-4bb0-8d39-4aec3566b0af" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.849593] env[63241]: DEBUG oslo_concurrency.lockutils [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] Acquired lock "refresh_cache-fbbb7682-873d-4bb0-8d39-4aec3566b0af" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.849787] env[63241]: DEBUG nova.network.neutron [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Refreshing network info cache for port b508f7cc-ab52-4957-9c5e-500a7f0b2335 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1353.858194] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1353.858194] env[63241]: value = "task-1819852" [ 1353.858194] env[63241]: _type = "Task" [ 1353.858194] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.886198] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819852, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.896667] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.950763] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819846, 'name': ReconfigVM_Task, 'duration_secs': 0.797559} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.951336] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Reconfigured VM instance instance-00000008 to attach disk [datastore1] fbbb7682-873d-4bb0-8d39-4aec3566b0af/fbbb7682-873d-4bb0-8d39-4aec3566b0af.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1353.952643] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba43a127-18e7-44bc-8e42-69fb1eece5bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.965920] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1353.965920] env[63241]: value = "task-1819853" [ 1353.965920] env[63241]: _type = "Task" [ 1353.965920] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.979699] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819853, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.050173] env[63241]: INFO nova.compute.manager [-] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Took 1.76 seconds to deallocate network for instance. [ 1354.062013] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a8c310-3c45-c0fa-8333-ea224b06f1b4, 'name': SearchDatastore_Task, 'duration_secs': 0.014321} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.069789] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.070189] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1354.070482] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.070644] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.070848] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1354.071904] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d18d954-2b5b-4826-b23c-0a4b53071b64 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.083920] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1354.084189] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1354.085643] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92847cde-2613-43a2-b1e7-cb0f15226fc0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.094442] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1354.094442] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a2de03-c66d-7a62-b3f1-2db751cd09cf" [ 1354.094442] env[63241]: _type = "Task" [ 1354.094442] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.123220] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a2de03-c66d-7a62-b3f1-2db751cd09cf, 'name': SearchDatastore_Task, 'duration_secs': 0.01323} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.124957] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfec340b-c992-4916-8ae5-53a944edd1ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.133348] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1354.133348] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523e9f71-5088-a163-2174-87789fb5eca0" [ 1354.133348] env[63241]: _type = "Task" [ 1354.133348] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.146178] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523e9f71-5088-a163-2174-87789fb5eca0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.163410] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819850, 'name': CreateVM_Task, 'duration_secs': 0.479639} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.163643] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1354.164342] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.164510] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.164845] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1354.165110] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c6db120-4003-4ca7-a752-8d1f05e78488 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.172625] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1354.172625] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524845f8-84ad-ecb2-da45-768b0af56ebc" [ 1354.172625] env[63241]: _type = "Task" [ 1354.172625] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.183371] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524845f8-84ad-ecb2-da45-768b0af56ebc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.303647] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819851, 'name': PowerOffVM_Task, 'duration_secs': 0.177687} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.303933] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1354.304185] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1354.305163] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa17bd66-fd58-455e-84f9-6c76f0caa329 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.315970] env[63241]: DEBUG nova.network.neutron [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1354.336408] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1354.336408] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f54a33c-a144-484c-9ca5-99ea9cd7dc96 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.373352] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1354.373581] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1354.373756] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Deleting the datastore file [datastore1] b4182e53-50db-4256-b376-b00100778935 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1354.374032] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819852, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.374272] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a78742a-7882-411e-b7b1-76709bb75900 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.384088] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1354.384088] env[63241]: value = "task-1819855" [ 1354.384088] env[63241]: _type = "Task" [ 1354.384088] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.394630] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.442826] env[63241]: DEBUG nova.network.neutron [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.478573] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819853, 'name': Rename_Task, 'duration_secs': 0.201104} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.482778] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1354.487147] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33743470-5c32-4699-95d5-21feb659ac5b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.498345] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1354.498345] env[63241]: value = "task-1819856" [ 1354.498345] env[63241]: _type = "Task" [ 1354.498345] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.513106] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819856, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.575382] env[63241]: DEBUG oslo_concurrency.lockutils [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.645691] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523e9f71-5088-a163-2174-87789fb5eca0, 'name': SearchDatastore_Task, 'duration_secs': 0.014644} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.648776] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.649104] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 97890eda-0c1d-4423-acd2-60d3097c6f8a/97890eda-0c1d-4423-acd2-60d3097c6f8a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1354.649602] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-953324a9-3133-4364-9919-3c310993be39 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.659604] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1354.659604] env[63241]: value = "task-1819857" [ 1354.659604] env[63241]: _type = "Task" [ 1354.659604] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.676022] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.676022] env[63241]: DEBUG nova.network.neutron [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Updated VIF entry in instance network info cache for port b508f7cc-ab52-4957-9c5e-500a7f0b2335. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1354.676273] env[63241]: DEBUG nova.network.neutron [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Updating instance_info_cache with network_info: [{"id": "b508f7cc-ab52-4957-9c5e-500a7f0b2335", "address": "fa:16:3e:b9:16:53", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb508f7cc-ab", "ovs_interfaceid": "b508f7cc-ab52-4957-9c5e-500a7f0b2335", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.690570] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524845f8-84ad-ecb2-da45-768b0af56ebc, 'name': SearchDatastore_Task, 'duration_secs': 0.016767} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.693868] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.694141] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1354.694481] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.694653] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.694837] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1354.699145] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-018627e1-9735-43d1-b3f3-4b34a50b10f4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.713969] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1354.714169] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1354.715497] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3501f16a-30a2-4aa4-a898-a1d749d09a15 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.728130] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1354.728130] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52629d8c-d1a3-0cb2-7e4c-4fc15522478e" [ 1354.728130] env[63241]: _type = "Task" [ 1354.728130] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.740055] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52629d8c-d1a3-0cb2-7e4c-4fc15522478e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.806303] env[63241]: DEBUG nova.network.neutron [None req-0162d53c-2961-400f-b613-2a227e367208 tempest-ServerExternalEventsTest-1325232468 tempest-ServerExternalEventsTest-1325232468-project] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Updating instance_info_cache with network_info: [{"id": "7168ddd9-eca4-4ba1-a734-ef4f493aa646", "address": "fa:16:3e:87:ec:da", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7168ddd9-ec", "ovs_interfaceid": "7168ddd9-eca4-4ba1-a734-ef4f493aa646", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.816986] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e36388-1fe3-42c4-b4a9-42582fd2c28b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.822649] env[63241]: DEBUG nova.network.neutron [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Updated VIF entry in instance network info cache for port c55c077c-917c-45af-a4ea-c51ad8696209. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1354.823078] env[63241]: DEBUG nova.network.neutron [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Updating instance_info_cache with network_info: [{"id": "c55c077c-917c-45af-a4ea-c51ad8696209", "address": "fa:16:3e:41:1d:ed", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc55c077c-91", "ovs_interfaceid": "c55c077c-917c-45af-a4ea-c51ad8696209", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.827892] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf2a24f-a03a-4b39-8e15-100b44ad2960 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.869462] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a571804a-da09-42c9-ace6-c752939a2b15 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.879288] env[63241]: DEBUG oslo_vmware.api [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1819852, 'name': PowerOnVM_Task, 'duration_secs': 0.647493} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.881463] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1354.881721] env[63241]: INFO nova.compute.manager [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Took 12.78 seconds to spawn the instance on the hypervisor. 
[ 1354.881936] env[63241]: DEBUG nova.compute.manager [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1354.883171] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0770e3ee-ce30-4971-bd6a-6f473d44e7d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.887410] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdabe37-d6e1-4581-9ffd-1e0ce89a5d83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.902924] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.304909} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.914584] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1354.914852] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1354.915076] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1354.918448] env[63241]: DEBUG nova.compute.provider_tree [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1354.949164] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Releasing lock "refresh_cache-69c73342-258a-4b00-ba1b-ffdd5f247890" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.950377] env[63241]: DEBUG nova.compute.manager [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1354.950377] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1354.951913] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03027fa2-7a7a-4f9f-8146-f348e6990118 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.964525] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1354.964709] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1158a860-cbe2-4204-9765-cdd4513cee3f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.973255] env[63241]: DEBUG oslo_vmware.api [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1354.973255] env[63241]: value = "task-1819858" [ 1354.973255] env[63241]: _type = "Task" [ 1354.973255] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.986701] env[63241]: DEBUG oslo_vmware.api [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819858, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.012432] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819856, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.173268] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819857, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.181332] env[63241]: DEBUG oslo_concurrency.lockutils [req-0f8edd71-8be5-4654-87e4-e062514edaef req-d928075c-4ceb-4884-b3ac-4988bee5e2af service nova] Releasing lock "refresh_cache-fbbb7682-873d-4bb0-8d39-4aec3566b0af" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.244550] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52629d8c-d1a3-0cb2-7e4c-4fc15522478e, 'name': SearchDatastore_Task, 'duration_secs': 0.022501} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.245687] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fd2240f-fe11-4474-9b69-0efc6642922b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.254878] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1355.254878] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52202412-5c71-4c30-e7c1-8b2d9b991ab7" [ 1355.254878] env[63241]: _type = "Task" [ 1355.254878] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.267854] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52202412-5c71-4c30-e7c1-8b2d9b991ab7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.308752] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0162d53c-2961-400f-b613-2a227e367208 tempest-ServerExternalEventsTest-1325232468 tempest-ServerExternalEventsTest-1325232468-project] Releasing lock "refresh_cache-0440c0a8-f065-4a82-b190-33279e7c0d93" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.326396] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Releasing lock "refresh_cache-0e5447fd-a04f-4bc2-b329-e015883773b8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.326912] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Received event network-vif-plugged-4388c54d-69af-4eb2-8f0b-e40773bf2e95 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1355.327127] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Acquiring lock "97890eda-0c1d-4423-acd2-60d3097c6f8a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.327347] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Lock "97890eda-0c1d-4423-acd2-60d3097c6f8a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.327813] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Lock "97890eda-0c1d-4423-acd2-60d3097c6f8a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.328019] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] No waiting events found dispatching network-vif-plugged-4388c54d-69af-4eb2-8f0b-e40773bf2e95 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1355.328342] env[63241]: WARNING nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Received unexpected event network-vif-plugged-4388c54d-69af-4eb2-8f0b-e40773bf2e95 for instance with vm_state building and task_state spawning. 
[ 1355.328342] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Received event network-changed-4388c54d-69af-4eb2-8f0b-e40773bf2e95 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1355.328746] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Refreshing instance network info cache due to event network-changed-4388c54d-69af-4eb2-8f0b-e40773bf2e95. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1355.328746] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Acquiring lock "refresh_cache-97890eda-0c1d-4423-acd2-60d3097c6f8a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.328932] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Acquired lock "refresh_cache-97890eda-0c1d-4423-acd2-60d3097c6f8a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.328986] env[63241]: DEBUG nova.network.neutron [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Refreshing network info cache for port 4388c54d-69af-4eb2-8f0b-e40773bf2e95 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1355.437426] env[63241]: DEBUG nova.scheduler.client.report [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1355.440546] env[63241]: INFO nova.compute.manager [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Took 22.69 seconds to build instance. [ 1355.485691] env[63241]: DEBUG oslo_vmware.api [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819858, 'name': PowerOffVM_Task, 'duration_secs': 0.174325} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.486409] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1355.486658] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1355.486848] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2b1cb8e-92c3-43c2-9d12-2fe75e3374d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.514200] env[63241]: DEBUG oslo_vmware.api [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1819856, 'name': PowerOnVM_Task, 'duration_secs': 0.844416} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.515688] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1355.515937] env[63241]: INFO nova.compute.manager [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Took 11.02 seconds to spawn the instance on the hypervisor. 
[ 1355.516148] env[63241]: DEBUG nova.compute.manager [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1355.517022] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0394f884-df15-41af-a0d5-5916197ec33e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.522433] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1355.522896] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1355.523114] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Deleting the datastore file [datastore1] 69c73342-258a-4b00-ba1b-ffdd5f247890 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1355.523745] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86c4ec00-0bf8-4643-b502-b8a25b6e0bee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.537667] env[63241]: DEBUG oslo_vmware.api [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for the task: (returnval){ [ 1355.537667] env[63241]: value = "task-1819860" [ 1355.537667] env[63241]: _type = "Task" [ 1355.537667] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.548585] env[63241]: DEBUG oslo_vmware.api [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819860, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.672752] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819857, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.766795] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52202412-5c71-4c30-e7c1-8b2d9b991ab7, 'name': SearchDatastore_Task, 'duration_secs': 0.043003} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.767077] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.767343] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a1f24cfe-88f0-4e73-9ade-2dcf907848a1/a1f24cfe-88f0-4e73-9ade-2dcf907848a1.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1355.767683] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7945b3d3-2ada-4370-b9e5-240337f6c68b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.775831] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1355.775831] env[63241]: value = "task-1819861" [ 1355.775831] env[63241]: _type = "Task" [ 1355.775831] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.785706] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819861, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.943278] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.944176] env[63241]: DEBUG nova.compute.manager [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1355.947590] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.657s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.949517] env[63241]: INFO nova.compute.claims [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1355.952921] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5f5b0aff-17f6-4308-90f3-aad40eaba4a6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "0e5447fd-a04f-4bc2-b329-e015883773b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.227s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.971691] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1355.971996] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1355.972145] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1355.972327] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1355.972787] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1355.972889] 
env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1355.973116] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1355.973330] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1355.973609] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1355.973833] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1355.974015] env[63241]: DEBUG nova.virt.hardware [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1355.974978] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d65efc-7452-4aef-b491-f9f0f61d4fbd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.990962] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84358098-ce77-4f7d-a1a7-2441f675cd1f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.010279] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1356.021388] env[63241]: DEBUG oslo.service.loopingcall [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1356.022831] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4182e53-50db-4256-b376-b00100778935] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1356.023274] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2702676-e44e-4457-9cd5-e4dd457d499e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.058275] env[63241]: INFO nova.compute.manager [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Took 20.63 seconds to build instance. [ 1356.058985] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1356.058985] env[63241]: value = "task-1819862" [ 1356.058985] env[63241]: _type = "Task" [ 1356.058985] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.067998] env[63241]: DEBUG oslo_vmware.api [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Task: {'id': task-1819860, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.388253} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.068545] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1356.068773] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1356.068945] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1356.069074] env[63241]: INFO nova.compute.manager [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1356.069311] env[63241]: DEBUG oslo.service.loopingcall [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1356.072193] env[63241]: DEBUG nova.compute.manager [-] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1356.072292] env[63241]: DEBUG nova.network.neutron [-] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1356.080122] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819862, 'name': CreateVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.119023] env[63241]: DEBUG nova.network.neutron [-] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1356.172755] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819857, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.287746] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819861, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.456159] env[63241]: DEBUG nova.compute.utils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1356.462195] env[63241]: DEBUG nova.compute.manager [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1356.462676] env[63241]: DEBUG nova.network.neutron [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1356.468077] env[63241]: DEBUG nova.compute.manager [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1356.501019] env[63241]: DEBUG nova.network.neutron [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Updated VIF entry in instance network info cache for port 4388c54d-69af-4eb2-8f0b-e40773bf2e95. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1356.501374] env[63241]: DEBUG nova.network.neutron [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Updating instance_info_cache with network_info: [{"id": "4388c54d-69af-4eb2-8f0b-e40773bf2e95", "address": "fa:16:3e:27:1f:ac", "network": {"id": "76566fa9-e905-4509-ba1e-02e3aa059cae", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1889079318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69f026988ee840638c4d1163b629d7cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4388c54d-69", "ovs_interfaceid": "4388c54d-69af-4eb2-8f0b-e40773bf2e95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1356.556058] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquiring lock "0440c0a8-f065-4a82-b190-33279e7c0d93" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.556380] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "0440c0a8-f065-4a82-b190-33279e7c0d93" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.556693] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquiring lock "0440c0a8-f065-4a82-b190-33279e7c0d93-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.556975] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "0440c0a8-f065-4a82-b190-33279e7c0d93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.557552] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "0440c0a8-f065-4a82-b190-33279e7c0d93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.561354] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d8124676-b023-4687-9708-3d84f284b1a2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.157s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.561354] env[63241]: INFO nova.compute.manager [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Terminating instance [ 1356.565588] env[63241]: DEBUG nova.compute.manager [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1356.565588] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1356.569312] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928fbf4d-0b09-4665-8500-865d5d071165 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.580119] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819862, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.582705] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1356.583778] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-498f736b-3fcd-4b3d-aa18-13be793678e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.592879] env[63241]: DEBUG oslo_vmware.api [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1356.592879] env[63241]: value = "task-1819863" [ 1356.592879] env[63241]: _type = "Task" [ 1356.592879] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.605697] env[63241]: DEBUG oslo_vmware.api [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819863, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.611661] env[63241]: DEBUG nova.policy [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3918943329014fa9b2de646fed7d2714', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fedeb3768ebc4b96bd5a85bfb0a03cf8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1356.618323] env[63241]: DEBUG nova.network.neutron [-] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1356.673729] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819857, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.691937} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.674045] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 97890eda-0c1d-4423-acd2-60d3097c6f8a/97890eda-0c1d-4423-acd2-60d3097c6f8a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1356.674433] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1356.674556] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-944a2b21-b435-416b-8f25-38a200f8c528 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.682201] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1356.682201] env[63241]: value = "task-1819864" [ 1356.682201] env[63241]: _type = "Task" [ 1356.682201] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.692882] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819864, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.789891] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819861, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.968703] env[63241]: DEBUG nova.compute.manager [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1357.007239] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.007239] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Releasing lock "refresh_cache-97890eda-0c1d-4423-acd2-60d3097c6f8a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.007239] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Received event network-vif-plugged-61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1357.007239] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Acquiring lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.007239] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.007435] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.007435] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] No waiting events found dispatching network-vif-plugged-61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1357.007435] env[63241]: WARNING nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Received unexpected event network-vif-plugged-61bceac0-2e58-4bc3-92f6-c421aabdfc8b for instance with vm_state building and task_state spawning. [ 1357.007435] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Received event network-changed-61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1357.007435] env[63241]: DEBUG nova.compute.manager [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Refreshing instance network info cache due to event network-changed-61bceac0-2e58-4bc3-92f6-c421aabdfc8b. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1357.007588] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Acquiring lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.007588] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Acquired lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.007588] env[63241]: DEBUG nova.network.neutron [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Refreshing network info cache for port 61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1357.075140] env[63241]: DEBUG nova.compute.manager [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1357.081023] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819862, 'name': CreateVM_Task, 'duration_secs': 0.606505} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.081023] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4182e53-50db-4256-b376-b00100778935] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1357.081699] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.081834] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.082096] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1357.082574] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eb753e6-b135-4fb0-9975-7d470d49f446 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.088092] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1357.088092] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522d7b8d-602c-935b-64de-2540a45d686b" [ 1357.088092] env[63241]: _type = "Task" [ 1357.088092] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.101479] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522d7b8d-602c-935b-64de-2540a45d686b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.109072] env[63241]: DEBUG oslo_vmware.api [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819863, 'name': PowerOffVM_Task, 'duration_secs': 0.297221} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.109357] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1357.109680] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1357.109764] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-115fc142-cddd-423e-b28a-1de50e01d2c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.124759] env[63241]: INFO nova.compute.manager [-] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Took 1.05 seconds to deallocate network for instance. [ 1357.196126] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819864, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076017} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.196858] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1357.197292] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f667f149-aba9-4b3f-9333-6f40fd98ee72 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.201458] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1357.201717] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1357.201964] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Deleting the datastore file [datastore1] 0440c0a8-f065-4a82-b190-33279e7c0d93 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1357.205065] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-fc9f7eb1-bca3-4dfb-abfa-cbc547210055 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.228772] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 97890eda-0c1d-4423-acd2-60d3097c6f8a/97890eda-0c1d-4423-acd2-60d3097c6f8a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1357.229810] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a661aa1-5287-468d-9be3-51796dffe32c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.249247] env[63241]: DEBUG oslo_vmware.api [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for the task: (returnval){ [ 1357.249247] env[63241]: value = "task-1819866" [ 1357.249247] env[63241]: _type = "Task" [ 1357.249247] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.257243] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1357.257243] env[63241]: value = "task-1819867" [ 1357.257243] env[63241]: _type = "Task" [ 1357.257243] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.260536] env[63241]: DEBUG oslo_vmware.api [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819866, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.273540] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819867, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.288467] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819861, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.237622} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.288467] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a1f24cfe-88f0-4e73-9ade-2dcf907848a1/a1f24cfe-88f0-4e73-9ade-2dcf907848a1.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1357.288467] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1357.288857] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84864210-b076-4838-9325-dc428b31c862 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.298604] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1357.298604] env[63241]: value = "task-1819868" [ 1357.298604] env[63241]: _type = "Task" [ 1357.298604] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.309227] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819868, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.331236] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5d229b-68db-4504-abeb-189f6d5b28dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.339711] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa20454d-c925-461b-88cf-5baced18e05c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.370411] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129e1cd7-9c8a-48f3-9f55-f4f3a3246e17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.379795] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73289b75-aff2-4262-aac1-a4b9c29b75c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.393699] env[63241]: DEBUG nova.compute.provider_tree [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1357.501024] env[63241]: DEBUG nova.network.neutron [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Successfully created port: 46d3ef2e-5410-4151-8ec8-30a6f2e5e221 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1357.540615] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquiring lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.540777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.599688] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522d7b8d-602c-935b-64de-2540a45d686b, 'name': SearchDatastore_Task, 'duration_secs': 0.027285} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.599688] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1357.599902] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1357.600084] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.600227] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.600872] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1357.601683] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99980b25-67ce-4b0e-a118-5b4b723a388c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.604565] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.617870] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1357.618070] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1357.618827] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44399f73-92d0-439d-9169-366282df6ddf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.624783] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1357.624783] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c0c8e7-86b7-cc72-8ef9-a6245b452b05" [ 1357.624783] env[63241]: _type = "Task" [ 1357.624783] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.633930] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.633930] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c0c8e7-86b7-cc72-8ef9-a6245b452b05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.761426] env[63241]: DEBUG oslo_vmware.api [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819866, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.773403] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819867, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.808722] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819868, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066843} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.810784] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1357.810784] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45651fb1-7ac4-4725-bcc3-198523bd7c69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.834442] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] a1f24cfe-88f0-4e73-9ade-2dcf907848a1/a1f24cfe-88f0-4e73-9ade-2dcf907848a1.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1357.834981] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc27988a-6c48-4115-9935-03e729fa392c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.859430] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1357.859430] env[63241]: value = "task-1819869" [ 1357.859430] env[63241]: _type = "Task" [ 1357.859430] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.869347] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819869, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.899022] env[63241]: DEBUG nova.scheduler.client.report [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1357.987387] env[63241]: DEBUG nova.compute.manager [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1358.017686] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:23:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1111409626',id=25,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1535560583',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1358.017959] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1358.018132] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1358.018314] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1358.018459] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1358.018604] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1358.018899] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1358.018979] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 
tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1358.019203] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1358.019301] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1358.019482] env[63241]: DEBUG nova.virt.hardware [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1358.020386] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4c1a5b-6d6d-475e-9174-f8ac9f745b2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.029745] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9556c942-82ce-4357-8ae3-571baeb1b8e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.142324] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c0c8e7-86b7-cc72-8ef9-a6245b452b05, 'name': SearchDatastore_Task, 'duration_secs': 0.040786} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.142324] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9d99d29-7973-493d-b87c-429469e691ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.148451] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1358.148451] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527da417-9899-5b31-a99f-a8ca351bf4db" [ 1358.148451] env[63241]: _type = "Task" [ 1358.148451] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.159155] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527da417-9899-5b31-a99f-a8ca351bf4db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.176800] env[63241]: DEBUG nova.network.neutron [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Updated VIF entry in instance network info cache for port 61bceac0-2e58-4bc3-92f6-c421aabdfc8b. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1358.177861] env[63241]: DEBUG nova.network.neutron [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Updating instance_info_cache with network_info: [{"id": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "address": "fa:16:3e:22:0e:78", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61bceac0-2e", "ovs_interfaceid": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.264294] env[63241]: DEBUG oslo_vmware.api [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Task: {'id': task-1819866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.58187} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.270712] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1358.270712] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1358.270712] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1358.270712] env[63241]: INFO nova.compute.manager [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1358.270712] env[63241]: DEBUG oslo.service.loopingcall [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1358.271051] env[63241]: DEBUG nova.compute.manager [-] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1358.271051] env[63241]: DEBUG nova.network.neutron [-] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1358.280078] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819867, 'name': ReconfigVM_Task, 'duration_secs': 0.719368} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.280078] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 97890eda-0c1d-4423-acd2-60d3097c6f8a/97890eda-0c1d-4423-acd2-60d3097c6f8a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1358.280310] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4be6c5b9-476e-4515-b858-ecea56b604eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.286717] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1358.286717] env[63241]: value = "task-1819870" [ 1358.286717] env[63241]: _type = "Task" [ 1358.286717] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.296352] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819870, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.372451] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819869, 'name': ReconfigVM_Task, 'duration_secs': 0.408462} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.373157] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Reconfigured VM instance instance-0000000a to attach disk [datastore1] a1f24cfe-88f0-4e73-9ade-2dcf907848a1/a1f24cfe-88f0-4e73-9ade-2dcf907848a1.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1358.373974] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bae2a438-62dc-4eda-9acb-6b1b068d9eb1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.382668] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1358.382668] env[63241]: value = "task-1819871" [ 1358.382668] env[63241]: _type = "Task" [ 1358.382668] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.392210] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819871, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.404181] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1358.404181] env[63241]: DEBUG nova.compute.manager [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1358.409076] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.681s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1358.410879] env[63241]: INFO nova.compute.claims [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1358.661200] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527da417-9899-5b31-a99f-a8ca351bf4db, 'name': SearchDatastore_Task, 'duration_secs': 0.014177} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.661551] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.661856] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1358.662547] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31132f59-846e-49d8-b259-a1272ae82d20 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.669745] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1358.669745] env[63241]: value = "task-1819872" [ 1358.669745] env[63241]: _type = "Task" [ 1358.669745] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.678513] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819872, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.681666] env[63241]: DEBUG oslo_concurrency.lockutils [req-b5b327a4-760d-41a2-a823-d2cc85e2ef9e req-fd170021-cc06-4558-91e3-30e0f84e61b6 service nova] Releasing lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.801361] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819870, 'name': Rename_Task, 'duration_secs': 0.280069} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.801855] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1358.802256] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfa136c4-38e6-4901-a2f6-415d7c1b5dd9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.809276] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1358.809276] env[63241]: value = "task-1819873" [ 1358.809276] env[63241]: _type = "Task" [ 1358.809276] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.820442] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819873, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.897994] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819871, 'name': Rename_Task, 'duration_secs': 0.27253} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.898232] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1358.898490] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab2ad0ad-d0a4-4ef6-b32f-609e75740048 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.909089] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1358.909089] env[63241]: value = "task-1819874" [ 1358.909089] env[63241]: _type = "Task" [ 1358.909089] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.921819] env[63241]: DEBUG nova.compute.utils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1358.922629] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819874, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.923263] env[63241]: DEBUG nova.compute.manager [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1358.923584] env[63241]: DEBUG nova.network.neutron [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1358.991592] env[63241]: DEBUG nova.policy [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f691b52644488c832ce1224a079218', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e48fc59560ab47ae87be73ab11b13e7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1359.052545] env[63241]: DEBUG nova.compute.manager [req-f6e8a04d-4bae-4a61-8c4e-defb8e2ae9df req-6aea2b7c-48e8-42b9-8085-9f9d8d8c7eef service nova] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Received event network-vif-deleted-2295e83d-9394-4f35-be55-49b2eb1f271b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1359.181922] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819872, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45044} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.181922] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1359.181922] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1359.181922] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9ee4503-e0b5-4a05-9df6-2794218c219b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.188284] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1359.188284] env[63241]: value = "task-1819875" [ 1359.188284] env[63241]: _type = "Task" [ 1359.188284] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.201690] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819875, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.205268] env[63241]: DEBUG nova.network.neutron [-] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.319848] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819873, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.420645] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819874, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.429181] env[63241]: DEBUG nova.compute.manager [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1359.467073] env[63241]: DEBUG nova.network.neutron [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Successfully created port: c5990c8b-c34c-4221-9e72-567817e9637d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1359.709558] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096776} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.710128] env[63241]: INFO nova.compute.manager [-] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Took 1.44 seconds to deallocate network for instance. [ 1359.710210] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1359.712430] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45321187-59d0-4bfc-aa87-c7b383aabf30 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.745533] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1359.751136] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f384557-abf4-473c-8585-a86270053894 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.772329] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1359.772329] env[63241]: value = "task-1819876" [ 1359.772329] env[63241]: _type = "Task" [ 1359.772329] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.786476] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819876, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.823918] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819873, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.846678] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9055d6-aca3-4f88-9497-360dedc3f76f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.855308] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6ca09b-13df-4d40-9f58-c506e7e0baa7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.890967] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a072706d-29b1-416d-a193-16fdf4724ae5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.902156] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d7fe99-1679-411e-b663-76ec8b1742e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.918908] env[63241]: DEBUG nova.compute.provider_tree [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.929879] env[63241]: DEBUG oslo_vmware.api [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1819874, 'name': PowerOnVM_Task, 'duration_secs': 0.64195} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.930206] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1359.931265] env[63241]: INFO nova.compute.manager [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Took 10.59 seconds to spawn the instance on the hypervisor. 
[ 1359.931265] env[63241]: DEBUG nova.compute.manager [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1359.931848] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad50f2e-cedb-41b8-a172-f739aed2fcb9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.064577] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquiring lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.064577] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.226311] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.296088] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819876, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.323727] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819873, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.426738] env[63241]: DEBUG nova.scheduler.client.report [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1360.448713] env[63241]: DEBUG nova.compute.manager [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1360.468163] env[63241]: INFO nova.compute.manager [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Took 21.24 seconds to build instance. [ 1360.487845] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1360.488623] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1360.488931] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1360.489494] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1360.489764] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 
tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1360.489958] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1360.490374] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1360.490536] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1360.490795] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1360.491064] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1360.492024] env[63241]: DEBUG nova.virt.hardware [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1360.494092] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1382fde2-e463-45e9-b749-909a32a37dee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.509126] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa56e70-0ab9-43b0-8423-b88dc0acc322 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.785727] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819876, 'name': ReconfigVM_Task, 'duration_secs': 0.904998} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.787176] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Reconfigured VM instance instance-00000002 to attach disk [datastore1] b4182e53-50db-4256-b376-b00100778935/b4182e53-50db-4256-b376-b00100778935.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1360.787176] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d7f77c7b-c099-4af9-ba80-4d473e39a6fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.796271] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1360.796271] env[63241]: value = "task-1819877" [ 1360.796271] env[63241]: _type = "Task" [ 1360.796271] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.805272] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819877, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.820426] env[63241]: DEBUG oslo_vmware.api [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819873, 'name': PowerOnVM_Task, 'duration_secs': 1.711594} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.820697] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1360.820913] env[63241]: INFO nova.compute.manager [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Took 13.98 seconds to spawn the instance on the hypervisor. 
[ 1360.821098] env[63241]: DEBUG nova.compute.manager [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1360.821929] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd8077d-9681-438c-bb4f-39c516c9f742 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.929111] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquiring lock "41182989-2537-42f0-8c37-792b8b2c5206" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.929831] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "41182989-2537-42f0-8c37-792b8b2c5206" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.936120] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1360.936673] env[63241]: DEBUG nova.compute.manager [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1360.941514] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.198s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.941877] env[63241]: DEBUG nova.objects.instance [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1360.972276] env[63241]: DEBUG oslo_concurrency.lockutils [None req-88c7b8c8-d1fb-466c-a8d9-80a2d2487e0e tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.761s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.308389] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819877, 'name': Rename_Task, 'duration_secs': 0.225761} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.308389] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1361.308389] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d881d5f1-15f9-40d5-9cf6-774d430968a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.314955] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Waiting for the task: (returnval){ [ 1361.314955] env[63241]: value = "task-1819878" [ 1361.314955] env[63241]: _type = "Task" [ 1361.314955] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.325635] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819878, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.344915] env[63241]: INFO nova.compute.manager [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Took 22.38 seconds to build instance. 
[ 1361.445900] env[63241]: DEBUG nova.compute.utils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1361.451638] env[63241]: DEBUG nova.compute.manager [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1361.453340] env[63241]: DEBUG nova.network.neutron [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1361.474687] env[63241]: DEBUG nova.compute.manager [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1361.518019] env[63241]: DEBUG nova.policy [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd12e2d63c2a54557840ea6d2110871dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5925758ee5404bbba0f8c9678fcd1eef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1361.832735] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819878, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.848757] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee2eb562-518a-422a-8454-cbb0f8a8f35b tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "97890eda-0c1d-4423-acd2-60d3097c6f8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.901s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.960530] env[63241]: DEBUG nova.compute.manager [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1361.967206] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6f8137cf-e2a9-4d1f-9fa4-b13f0f6f75a1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.968851] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.082s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.971598] env[63241]: INFO nova.compute.claims [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1361.983315] env[63241]: DEBUG nova.network.neutron [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Successfully created port: 279c7c67-cf23-442a-accf-544adeda8d12 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1362.005510] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.250682] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.250990] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.327559] env[63241]: DEBUG oslo_vmware.api [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Task: {'id': task-1819878, 'name': PowerOnVM_Task, 'duration_secs': 0.7939} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.328247] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.328569] env[63241]: DEBUG nova.compute.manager [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1362.329465] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877dcf4c-17ec-48a0-95e8-e006b6555c9f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.353796] env[63241]: DEBUG nova.compute.manager [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1362.412985] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "d60c3a22-19fb-4826-be88-d0307810a079" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.413769] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "d60c3a22-19fb-4826-be88-d0307810a079" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.439164] env[63241]: DEBUG nova.network.neutron [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Successfully updated port: 46d3ef2e-5410-4151-8ec8-30a6f2e5e221 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1362.495503] env[63241]: DEBUG nova.network.neutron [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Successfully updated port: c5990c8b-c34c-4221-9e72-567817e9637d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1362.848754] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.883300] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.941534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "refresh_cache-44508cc6-c576-4c30-8559-75118ceba02a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1362.941534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquired lock "refresh_cache-44508cc6-c576-4c30-8559-75118ceba02a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.941534] env[63241]: DEBUG nova.network.neutron [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1362.951134] env[63241]: DEBUG nova.compute.manager [req-231524f0-d4be-4be9-913b-9e0f36b63417 req-4bf4ea56-dbee-46a9-8f71-0b0a0c8dfe3f service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Received event network-changed-39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1362.951322] env[63241]: DEBUG nova.compute.manager [req-231524f0-d4be-4be9-913b-9e0f36b63417 req-4bf4ea56-dbee-46a9-8f71-0b0a0c8dfe3f service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Refreshing instance network info cache due to event network-changed-39b9ee92-fa8c-4018-be8f-6ad78d44a1a8. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1362.955075] env[63241]: DEBUG oslo_concurrency.lockutils [req-231524f0-d4be-4be9-913b-9e0f36b63417 req-4bf4ea56-dbee-46a9-8f71-0b0a0c8dfe3f service nova] Acquiring lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1362.955075] env[63241]: DEBUG oslo_concurrency.lockutils [req-231524f0-d4be-4be9-913b-9e0f36b63417 req-4bf4ea56-dbee-46a9-8f71-0b0a0c8dfe3f service nova] Acquired lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.955075] env[63241]: DEBUG nova.network.neutron [req-231524f0-d4be-4be9-913b-9e0f36b63417 req-4bf4ea56-dbee-46a9-8f71-0b0a0c8dfe3f service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Refreshing network info cache for port 39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1362.974610] env[63241]: DEBUG nova.compute.manager [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1363.002534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "refresh_cache-99eccbef-0e76-4532-af2f-5d74e563e1d2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.002708] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "refresh_cache-99eccbef-0e76-4532-af2f-5d74e563e1d2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.003120] env[63241]: DEBUG nova.network.neutron [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1363.018606] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1363.019125] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1363.019353] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1363.019612] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1363.019806] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1363.020021] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1363.020322] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1363.020545] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1363.020791] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1363.021014] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1363.021263] env[63241]: DEBUG nova.virt.hardware [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1363.024025] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbc65ae-3451-4d97-b8fa-63f8b4abe0a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1363.036081] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49763f0-7d3e-496a-b301-a89e8bb5e3c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.277439] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.277914] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.387394] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffa7409-586d-4105-b6ed-80765b4dd354 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.396011] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff0bfc4-a450-4ebc-9eba-8125ef10331e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.439420] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d435681-130e-41f5-b221-3326c1892ec2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.448479] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57569dd1-7a3d-4705-99cc-18138257bd86 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.465179] env[63241]: DEBUG nova.compute.provider_tree [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.543706] env[63241]: DEBUG nova.network.neutron [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1363.761762] env[63241]: DEBUG nova.network.neutron [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1363.811659] env[63241]: DEBUG nova.network.neutron [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Updating instance_info_cache with network_info: [{"id": "c5990c8b-c34c-4221-9e72-567817e9637d", "address": "fa:16:3e:80:62:bf", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5990c8b-c3", "ovs_interfaceid": "c5990c8b-c34c-4221-9e72-567817e9637d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.972018] env[63241]: DEBUG nova.scheduler.client.report [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1364.157693] env[63241]: DEBUG nova.network.neutron [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Successfully updated port: 279c7c67-cf23-442a-accf-544adeda8d12 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1364.236694] env[63241]: DEBUG nova.network.neutron [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Updating instance_info_cache with network_info: [{"id": "46d3ef2e-5410-4151-8ec8-30a6f2e5e221", "address": "fa:16:3e:47:8a:d3", "network": {"id": "a8367b18-022c-41b4-8c92-d1415c31263d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2039791152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fedeb3768ebc4b96bd5a85bfb0a03cf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46d3ef2e-54", "ovs_interfaceid": "46d3ef2e-5410-4151-8ec8-30a6f2e5e221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.303145] env[63241]: DEBUG nova.network.neutron [req-231524f0-d4be-4be9-913b-9e0f36b63417 req-4bf4ea56-dbee-46a9-8f71-0b0a0c8dfe3f service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Updated VIF entry in instance network info cache for port 39b9ee92-fa8c-4018-be8f-6ad78d44a1a8. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1364.303242] env[63241]: DEBUG nova.network.neutron [req-231524f0-d4be-4be9-913b-9e0f36b63417 req-4bf4ea56-dbee-46a9-8f71-0b0a0c8dfe3f service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Updating instance_info_cache with network_info: [{"id": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "address": "fa:16:3e:02:28:3a", "network": {"id": "66538b1c-dfa7-4a9a-84ed-9775e692d300", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1045273516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06bbbe738ef34806971a4883b7bb3cc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39b9ee92-fa", "ovs_interfaceid": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.317014] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "refresh_cache-99eccbef-0e76-4532-af2f-5d74e563e1d2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.317493] env[63241]: DEBUG nova.compute.manager [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Instance network_info: |[{"id": "c5990c8b-c34c-4221-9e72-567817e9637d", "address": "fa:16:3e:80:62:bf", "network": {"id": 
"cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5990c8b-c3", "ovs_interfaceid": "c5990c8b-c34c-4221-9e72-567817e9637d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1364.318067] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:62:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5990c8b-c34c-4221-9e72-567817e9637d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1364.328125] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating folder: Project (e48fc59560ab47ae87be73ab11b13e7c). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1364.329113] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfe58efd-17a5-4539-8a76-281101344da4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.345475] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Created folder: Project (e48fc59560ab47ae87be73ab11b13e7c) in parent group-v376927. [ 1364.345475] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating folder: Instances. Parent ref: group-v376962. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1364.345475] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-741fe1a2-aea3-453b-8fb2-54146a9b0f0f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.355803] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Created folder: Instances in parent group-v376962. 
[ 1364.355803] env[63241]: DEBUG oslo.service.loopingcall [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1364.355803] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1364.355803] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e47ea671-73a7-4c45-b2f5-8c8d3f3d5991 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.376864] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1364.376864] env[63241]: value = "task-1819881" [ 1364.376864] env[63241]: _type = "Task" [ 1364.376864] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.385962] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819881, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.477328] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.478338] env[63241]: DEBUG nova.compute.manager [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1364.482982] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.666s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.484505] env[63241]: INFO nova.compute.claims [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1364.659846] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquiring lock "refresh_cache-a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.660022] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquired lock "refresh_cache-a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.660171] env[63241]: DEBUG nova.network.neutron [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.740775] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Releasing lock "refresh_cache-44508cc6-c576-4c30-8559-75118ceba02a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.740775] env[63241]: DEBUG nova.compute.manager [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Instance network_info: |[{"id": "46d3ef2e-5410-4151-8ec8-30a6f2e5e221", "address": "fa:16:3e:47:8a:d3", "network": {"id": "a8367b18-022c-41b4-8c92-d1415c31263d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2039791152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fedeb3768ebc4b96bd5a85bfb0a03cf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": 
"nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46d3ef2e-54", "ovs_interfaceid": "46d3ef2e-5410-4151-8ec8-30a6f2e5e221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1364.740908] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:8a:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46d3ef2e-5410-4151-8ec8-30a6f2e5e221', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1364.749464] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Creating folder: Project (fedeb3768ebc4b96bd5a85bfb0a03cf8). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1364.750159] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef7c12e5-3f51-4c8f-a9d6-f28695126264 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.760773] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Created folder: Project (fedeb3768ebc4b96bd5a85bfb0a03cf8) in parent group-v376927. [ 1364.761348] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Creating folder: Instances. Parent ref: group-v376965. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1364.761426] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b695581-4196-4639-bdb7-e6549cd76a30 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.771754] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Created folder: Instances in parent group-v376965. [ 1364.772020] env[63241]: DEBUG oslo.service.loopingcall [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1364.772216] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1364.772566] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9db3009-764e-4df7-b218-c891863787b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.792858] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1364.792858] env[63241]: value = "task-1819884" [ 1364.792858] env[63241]: _type = "Task" [ 1364.792858] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.803612] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819884, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.807761] env[63241]: DEBUG oslo_concurrency.lockutils [req-231524f0-d4be-4be9-913b-9e0f36b63417 req-4bf4ea56-dbee-46a9-8f71-0b0a0c8dfe3f service nova] Releasing lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.808045] env[63241]: DEBUG nova.compute.manager [req-231524f0-d4be-4be9-913b-9e0f36b63417 req-4bf4ea56-dbee-46a9-8f71-0b0a0c8dfe3f service nova] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Received event network-vif-deleted-7168ddd9-eca4-4ba1-a734-ef4f493aa646 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1364.888766] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819881, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.989804] env[63241]: DEBUG nova.compute.utils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1364.991335] env[63241]: DEBUG nova.compute.manager [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1364.991592] env[63241]: DEBUG nova.network.neutron [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1365.003387] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "5060e745-08d0-429e-8780-bfdad7a29f30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.003692] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "5060e745-08d0-429e-8780-bfdad7a29f30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.122126] env[63241]: DEBUG nova.policy [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d17f292161c4b54b12cbc9291512ffa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bfb1bfc0a12b45cf84d18d038c94fe3a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1365.251081] env[63241]: DEBUG nova.network.neutron [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1365.306400] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819884, 'name': CreateVM_Task, 'duration_secs': 0.407538} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.306477] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1365.307681] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.308112] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.308626] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1365.309039] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5817b460-26c2-4fce-8678-2eedec3564f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.315678] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1365.315678] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5234e779-76f4-d2af-6cd7-bf49b04a88b1" [ 1365.315678] env[63241]: _type = "Task" [ 1365.315678] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.327081] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5234e779-76f4-d2af-6cd7-bf49b04a88b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.390526] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819881, 'name': CreateVM_Task, 'duration_secs': 0.521192} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.390835] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1365.392798] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.445138] env[63241]: DEBUG nova.compute.manager [req-0e2279c6-43d5-400e-89fb-64424057e487 req-e96ea545-e828-461e-b47c-31afb58ed6f7 service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Received event network-vif-plugged-279c7c67-cf23-442a-accf-544adeda8d12 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1365.445309] env[63241]: DEBUG oslo_concurrency.lockutils [req-0e2279c6-43d5-400e-89fb-64424057e487 req-e96ea545-e828-461e-b47c-31afb58ed6f7 service nova] Acquiring lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.445547] env[63241]: DEBUG oslo_concurrency.lockutils [req-0e2279c6-43d5-400e-89fb-64424057e487 req-e96ea545-e828-461e-b47c-31afb58ed6f7 service nova] Lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.445751] env[63241]: DEBUG oslo_concurrency.lockutils [req-0e2279c6-43d5-400e-89fb-64424057e487 req-e96ea545-e828-461e-b47c-31afb58ed6f7 service nova] Lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.446483] env[63241]: DEBUG nova.compute.manager [req-0e2279c6-43d5-400e-89fb-64424057e487 req-e96ea545-e828-461e-b47c-31afb58ed6f7 service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] No waiting events found dispatching network-vif-plugged-279c7c67-cf23-442a-accf-544adeda8d12 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1365.446483] env[63241]: WARNING nova.compute.manager [req-0e2279c6-43d5-400e-89fb-64424057e487 req-e96ea545-e828-461e-b47c-31afb58ed6f7 service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Received unexpected event network-vif-plugged-279c7c67-cf23-442a-accf-544adeda8d12 for instance with vm_state building and task_state spawning. [ 1365.494880] env[63241]: DEBUG nova.compute.manager [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1365.723449] env[63241]: DEBUG nova.network.neutron [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Updating instance_info_cache with network_info: [{"id": "279c7c67-cf23-442a-accf-544adeda8d12", "address": "fa:16:3e:b8:9d:da", "network": {"id": "3adeb608-f93c-4422-b870-1d1726c199e2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1887376543-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5925758ee5404bbba0f8c9678fcd1eef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap279c7c67-cf", "ovs_interfaceid": "279c7c67-cf23-442a-accf-544adeda8d12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.834475] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5234e779-76f4-d2af-6cd7-bf49b04a88b1, 'name': SearchDatastore_Task, 'duration_secs': 0.023916} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.834903] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.835174] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1365.835429] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1365.835623] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.835769] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1365.838357] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1365.838699] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1365.838942] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80093000-a5d5-40cc-ac49-0cd4bf6d46cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.840910] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-338a2dec-5ba5-4bca-9523-b4f0fdff853d {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.847052] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1365.847052] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5244327d-f6d3-6467-4654-ce16d8b0688c" [ 1365.847052] env[63241]: _type = "Task" [ 1365.847052] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.857609] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1365.857860] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1365.858623] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5244327d-f6d3-6467-4654-ce16d8b0688c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.858772] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff19a337-8848-48a4-9a5b-ea3c4a3c0c8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.866039] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1365.866039] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528d949a-0a23-a7f3-0c6c-f2459c44419f" [ 1365.866039] env[63241]: _type = "Task" [ 1365.866039] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.879163] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528d949a-0a23-a7f3-0c6c-f2459c44419f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.940564] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b2f33b-252c-4e54-81df-28ae22752e08 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.951879] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46524283-865d-4398-b967-ae9698dd540d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.997169] env[63241]: DEBUG nova.network.neutron [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Successfully created port: 1e0eeec7-9caf-4069-8cad-d1d0d038ea2b {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1365.999277] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "b4182e53-50db-4256-b376-b00100778935" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.999277] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "b4182e53-50db-4256-b376-b00100778935" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.999679] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "b4182e53-50db-4256-b376-b00100778935-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.999679] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "b4182e53-50db-4256-b376-b00100778935-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.001121] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "b4182e53-50db-4256-b376-b00100778935-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.003407] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b07ec2-e719-444a-9148-64030b96aeb3 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.015181] env[63241]: INFO nova.compute.manager [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Terminating instance [ 1366.015181] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "refresh_cache-b4182e53-50db-4256-b376-b00100778935" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.015181] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquired lock "refresh_cache-b4182e53-50db-4256-b376-b00100778935" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.015181] env[63241]: DEBUG nova.network.neutron [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1366.026485] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ef1c1c-6d10-4cc8-a868-a26335a9232e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.047469] env[63241]: DEBUG nova.compute.provider_tree [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1366.171432] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Received event network-vif-plugged-46d3ef2e-5410-4151-8ec8-30a6f2e5e221 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1366.171618] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Acquiring lock "44508cc6-c576-4c30-8559-75118ceba02a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.171946] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Lock "44508cc6-c576-4c30-8559-75118ceba02a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.174472] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Lock "44508cc6-c576-4c30-8559-75118ceba02a-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.174472] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] No waiting events found dispatching network-vif-plugged-46d3ef2e-5410-4151-8ec8-30a6f2e5e221 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1366.174472] env[63241]: WARNING nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Received unexpected event network-vif-plugged-46d3ef2e-5410-4151-8ec8-30a6f2e5e221 for instance with vm_state building and task_state spawning. [ 1366.174472] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Received event network-vif-plugged-c5990c8b-c34c-4221-9e72-567817e9637d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1366.174472] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Acquiring lock "99eccbef-0e76-4532-af2f-5d74e563e1d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.174782] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.174782] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.174782] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] No waiting events found dispatching network-vif-plugged-c5990c8b-c34c-4221-9e72-567817e9637d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1366.174782] env[63241]: WARNING nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Received unexpected event network-vif-plugged-c5990c8b-c34c-4221-9e72-567817e9637d for instance with vm_state building and task_state spawning. 
[ 1366.174782] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Received event network-changed-46d3ef2e-5410-4151-8ec8-30a6f2e5e221 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1366.174996] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Refreshing instance network info cache due to event network-changed-46d3ef2e-5410-4151-8ec8-30a6f2e5e221. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1366.174996] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Acquiring lock "refresh_cache-44508cc6-c576-4c30-8559-75118ceba02a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.175089] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Acquired lock "refresh_cache-44508cc6-c576-4c30-8559-75118ceba02a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.175239] env[63241]: DEBUG nova.network.neutron [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Refreshing network info cache for port 46d3ef2e-5410-4151-8ec8-30a6f2e5e221 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1366.226449] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Releasing lock "refresh_cache-a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.227034] env[63241]: DEBUG nova.compute.manager [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Instance network_info: |[{"id": "279c7c67-cf23-442a-accf-544adeda8d12", "address": "fa:16:3e:b8:9d:da", "network": {"id": "3adeb608-f93c-4422-b870-1d1726c199e2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1887376543-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5925758ee5404bbba0f8c9678fcd1eef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap279c7c67-cf", "ovs_interfaceid": "279c7c67-cf23-442a-accf-544adeda8d12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1366.227199] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:9d:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd098b1c-636f-492d-b5ae-037cb0cae454', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '279c7c67-cf23-442a-accf-544adeda8d12', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1366.235017] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Creating folder: Project (5925758ee5404bbba0f8c9678fcd1eef). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1366.235809] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0facab6e-3ffe-4acd-8e85-d760faef432c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.248187] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Created folder: Project (5925758ee5404bbba0f8c9678fcd1eef) in parent group-v376927. [ 1366.248187] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Creating folder: Instances. Parent ref: group-v376968. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1366.248187] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7442db0-3abf-4763-9214-0e8ec7d8962d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.256971] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Created folder: Instances in parent group-v376968. [ 1366.257049] env[63241]: DEBUG oslo.service.loopingcall [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1366.257231] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1366.257490] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebb2b13a-4937-4f69-9965-8782dd523e6d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.285280] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1366.285280] env[63241]: value = "task-1819887" [ 1366.285280] env[63241]: _type = "Task" [ 1366.285280] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.297741] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819887, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.358993] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5244327d-f6d3-6467-4654-ce16d8b0688c, 'name': SearchDatastore_Task, 'duration_secs': 0.020715} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.359363] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.359845] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1366.360098] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.380072] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528d949a-0a23-a7f3-0c6c-f2459c44419f, 'name': SearchDatastore_Task, 'duration_secs': 0.026579} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.381012] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7974449-57f0-4e3d-977e-8102a8bc70e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.387405] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1366.387405] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520d7046-0292-3dfb-5937-b2378daf778d" [ 1366.387405] env[63241]: _type = "Task" [ 1366.387405] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.395906] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520d7046-0292-3dfb-5937-b2378daf778d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.514424] env[63241]: DEBUG nova.compute.manager [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1366.543197] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1366.543493] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1366.543681] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1366.543888] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1366.544314] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1366.544552] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1366.544830] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1366.544949] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1366.545132] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1366.545292] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1366.545495] env[63241]: DEBUG nova.virt.hardware [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1366.546840] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a2b7d2-9c77-4be6-9d3f-ca3ab4836807 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.550420] env[63241]: DEBUG nova.scheduler.client.report [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1366.559201] env[63241]: DEBUG nova.network.neutron [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1366.563061] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598d7c07-7ed7-4d94-bc4f-2fdfdb5e15c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.737723] env[63241]: DEBUG nova.network.neutron [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.799472] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819887, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.899587] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520d7046-0292-3dfb-5937-b2378daf778d, 'name': SearchDatastore_Task, 'duration_secs': 0.017023} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.899915] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.900209] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 44508cc6-c576-4c30-8559-75118ceba02a/44508cc6-c576-4c30-8559-75118ceba02a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1366.900543] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.900772] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1366.901051] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8aacb9b-3542-415e-93ac-09a57901d88b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.906301] env[63241]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f86f0f5-fffd-4074-9853-1c0093a9b0ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.914679] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1366.914679] env[63241]: value = "task-1819888" [ 1366.914679] env[63241]: _type = "Task" [ 1366.914679] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.919175] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1366.919368] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1366.920523] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95078319-a15c-4b17-afe8-a652eb18d176 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.930284] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.933579] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1366.933579] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529ed184-a5b3-b522-baeb-16d05af8e296" [ 1366.933579] env[63241]: _type = "Task" [ 1366.933579] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.942617] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529ed184-a5b3-b522-baeb-16d05af8e296, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.055858] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.056487] env[63241]: DEBUG nova.compute.manager [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1367.059788] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.163s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.062030] env[63241]: INFO nova.compute.claims [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1367.211393] env[63241]: DEBUG nova.network.neutron [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Updated VIF entry in instance network info cache for port 46d3ef2e-5410-4151-8ec8-30a6f2e5e221. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1367.211757] env[63241]: DEBUG nova.network.neutron [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Updating instance_info_cache with network_info: [{"id": "46d3ef2e-5410-4151-8ec8-30a6f2e5e221", "address": "fa:16:3e:47:8a:d3", "network": {"id": "a8367b18-022c-41b4-8c92-d1415c31263d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2039791152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fedeb3768ebc4b96bd5a85bfb0a03cf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46d3ef2e-54", "ovs_interfaceid": "46d3ef2e-5410-4151-8ec8-30a6f2e5e221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.240229] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Releasing lock "refresh_cache-b4182e53-50db-4256-b376-b00100778935" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.240702] env[63241]: DEBUG nova.compute.manager [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1367.241041] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1367.243687] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987ad5ba-9d13-4fc6-8e31-db8efc4ceb10 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.250943] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1367.251947] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66052408-9d69-412a-a901-54d8baa04d66 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.260956] env[63241]: DEBUG oslo_vmware.api [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1367.260956] env[63241]: value = "task-1819889" [ 1367.260956] env[63241]: _type = "Task" [ 1367.260956] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.267576] env[63241]: DEBUG oslo_vmware.api [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819889, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.296841] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819887, 'name': CreateVM_Task, 'duration_secs': 0.69021} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.297902] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1367.298770] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.299679] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.299679] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1367.300684] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d70bb38d-cc66-4c00-96fe-d59997865aed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.307070] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1367.307070] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526011c0-49f8-2554-cc3d-08c3e953020c" [ 1367.307070] env[63241]: _type = "Task" [ 1367.307070] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.317142] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526011c0-49f8-2554-cc3d-08c3e953020c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.425869] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819888, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.446742] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529ed184-a5b3-b522-baeb-16d05af8e296, 'name': SearchDatastore_Task, 'duration_secs': 0.010172} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.447036] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07d6a9c7-d1c9-46fb-bf90-2e3faa792cf2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.453898] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1367.453898] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52de29cc-076a-d07b-0a2e-906b57752948" [ 1367.453898] env[63241]: _type = "Task" [ 1367.453898] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.463691] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52de29cc-076a-d07b-0a2e-906b57752948, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.571503] env[63241]: DEBUG nova.compute.utils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1367.580045] env[63241]: DEBUG nova.compute.manager [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1367.580045] env[63241]: DEBUG nova.network.neutron [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1367.635188] env[63241]: DEBUG nova.policy [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b2286e0bee4a40e99058009376f30820', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b98d078fef845cf87f6d932885790e1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1367.716329] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Releasing lock "refresh_cache-44508cc6-c576-4c30-8559-75118ceba02a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.716329] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Received event network-changed-c5990c8b-c34c-4221-9e72-567817e9637d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1367.716329] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Refreshing instance network info cache due to event network-changed-c5990c8b-c34c-4221-9e72-567817e9637d. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1367.716329] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Acquiring lock "refresh_cache-99eccbef-0e76-4532-af2f-5d74e563e1d2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.716505] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Acquired lock "refresh_cache-99eccbef-0e76-4532-af2f-5d74e563e1d2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.716850] env[63241]: DEBUG nova.network.neutron [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Refreshing network info cache for port c5990c8b-c34c-4221-9e72-567817e9637d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1367.772599] env[63241]: DEBUG oslo_vmware.api [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819889, 'name': PowerOffVM_Task, 'duration_secs': 0.165865} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.773059] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1367.773382] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1367.773785] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-843c89aa-083b-4adb-a4ae-671850e1ae80 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.803212] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1367.803212] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1367.803212] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Deleting the datastore file [datastore1] b4182e53-50db-4256-b376-b00100778935 {{(pid=63241) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1367.803212] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76faa07c-85ea-49e8-a28a-6ff7eb0b13b4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.812670] env[63241]: DEBUG oslo_vmware.api [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for the task: (returnval){ [ 1367.812670] env[63241]: value = "task-1819891" [ 1367.812670] env[63241]: _type = "Task" [ 1367.812670] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.822022] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526011c0-49f8-2554-cc3d-08c3e953020c, 'name': SearchDatastore_Task, 'duration_secs': 0.056055} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.823083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.823545] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1367.823907] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.828585] env[63241]: DEBUG oslo_vmware.api [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819891, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.933279] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819888, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.623343} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.933582] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 44508cc6-c576-4c30-8559-75118ceba02a/44508cc6-c576-4c30-8559-75118ceba02a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1367.933842] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1367.934121] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bde90c25-2879-47e2-8c1a-48ec96692560 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.941875] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1367.941875] env[63241]: value = "task-1819892" [ 1367.941875] env[63241]: _type = "Task" [ 1367.941875] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.966750] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819892, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.969912] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52de29cc-076a-d07b-0a2e-906b57752948, 'name': SearchDatastore_Task, 'duration_secs': 0.043488} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.970837] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.970978] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 99eccbef-0e76-4532-af2f-5d74e563e1d2/99eccbef-0e76-4532-af2f-5d74e563e1d2.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1367.971379] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.971485] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1367.971733] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f642db3-132c-4c64-b8da-699ea16b8a96 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.974787] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b69842db-eb8d-4d1b-bc34-8a4f6f919392 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.982616] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1367.982616] env[63241]: value = "task-1819893" [ 1367.982616] env[63241]: _type = "Task" [ 1367.982616] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.983449] env[63241]: DEBUG nova.network.neutron [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Successfully created port: cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1367.986810] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1367.987089] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1367.992706] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-744316b8-aaac-4968-85a1-d3f790f57e45 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.001834] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1368.001834] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528790d5-319c-a101-f9ca-80ec593edccc" [ 1368.001834] env[63241]: _type = "Task" [ 1368.001834] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.006313] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819893, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.016420] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528790d5-319c-a101-f9ca-80ec593edccc, 'name': SearchDatastore_Task, 'duration_secs': 0.009093} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.017652] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d283c869-4aea-4f9f-8eb0-e9b19c0ea2c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.024439] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1368.024439] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5296a7ff-42a9-7cad-4ff5-b83a7ad0e20e" [ 1368.024439] env[63241]: _type = "Task" [ 1368.024439] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.032555] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5296a7ff-42a9-7cad-4ff5-b83a7ad0e20e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.087677] env[63241]: DEBUG nova.compute.manager [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1368.343730] env[63241]: DEBUG oslo_vmware.api [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Task: {'id': task-1819891, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091597} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.346568] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1368.346568] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1368.346568] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1368.346568] env[63241]: INFO nova.compute.manager [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] [instance: b4182e53-50db-4256-b376-b00100778935] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1368.346568] env[63241]: DEBUG oslo.service.loopingcall [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1368.346959] env[63241]: DEBUG nova.compute.manager [-] [instance: b4182e53-50db-4256-b376-b00100778935] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1368.346959] env[63241]: DEBUG nova.network.neutron [-] [instance: b4182e53-50db-4256-b376-b00100778935] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1368.390183] env[63241]: DEBUG nova.network.neutron [-] [instance: b4182e53-50db-4256-b376-b00100778935] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1368.454565] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819892, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084962} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.457208] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1368.460224] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2190d4-ad37-43d3-ae66-06275789f20a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.483243] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 44508cc6-c576-4c30-8559-75118ceba02a/44508cc6-c576-4c30-8559-75118ceba02a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1368.486385] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14ffcf75-cf66-431b-bcc7-2e6f41038a95 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.511531] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.422756} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.512850] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 99eccbef-0e76-4532-af2f-5d74e563e1d2/99eccbef-0e76-4532-af2f-5d74e563e1d2.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1368.512984] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1368.513273] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1368.513273] env[63241]: value = "task-1819894" [ 1368.513273] env[63241]: _type = "Task" [ 1368.513273] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.516429] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-248c42aa-2c9c-4ac0-a910-27506289bc72 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.533610] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1368.533610] env[63241]: value = "task-1819895" [ 1368.533610] env[63241]: _type = "Task" [ 1368.533610] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.533850] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819894, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.544623] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5296a7ff-42a9-7cad-4ff5-b83a7ad0e20e, 'name': SearchDatastore_Task, 'duration_secs': 0.008606} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.547872] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1368.548225] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a88ba00d-6644-4ecc-8603-a7d79ce8a4b4/a88ba00d-6644-4ecc-8603-a7d79ce8a4b4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1368.548483] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819895, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.550855] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d60504ca-de1f-4f65-8d73-f7a2409d92fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.558586] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1368.558586] env[63241]: value = "task-1819896" [ 1368.558586] env[63241]: _type = "Task" [ 1368.558586] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.570259] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819896, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.629620] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2536871d-8ced-4fed-b2b0-4ffa6d908de5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.638986] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fdbcce-5892-4549-ba02-a59b163c9a4c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.676886] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1129dc-add6-4a52-8a2d-f9caa72259b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.685726] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5f6297-5dfa-4114-a6a2-75b84c47bf85 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.700972] env[63241]: DEBUG nova.compute.provider_tree [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1368.892773] env[63241]: DEBUG nova.network.neutron [-] [instance: b4182e53-50db-4256-b376-b00100778935] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.899804] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquiring lock "97890eda-0c1d-4423-acd2-60d3097c6f8a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.900116] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "97890eda-0c1d-4423-acd2-60d3097c6f8a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.900448] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquiring lock "97890eda-0c1d-4423-acd2-60d3097c6f8a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.900549] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "97890eda-0c1d-4423-acd2-60d3097c6f8a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.900658] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "97890eda-0c1d-4423-acd2-60d3097c6f8a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.905609] env[63241]: INFO nova.compute.manager [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Terminating instance [ 1368.910725] env[63241]: DEBUG nova.compute.manager [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1368.910725] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1368.910725] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08057424-0101-41d5-aee9-e29a90e16026 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.919932] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1368.920215] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41526c72-1a44-44a4-9268-339218d20a50 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.928501] env[63241]: DEBUG oslo_vmware.api [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1368.928501] env[63241]: value = "task-1819897" [ 1368.928501] env[63241]: _type = "Task" [ 1368.928501] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.945795] env[63241]: DEBUG oslo_vmware.api [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819897, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.028264] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819894, 'name': ReconfigVM_Task, 'duration_secs': 0.285596} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.028597] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 44508cc6-c576-4c30-8559-75118ceba02a/44508cc6-c576-4c30-8559-75118ceba02a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1369.029502] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b273a422-89df-4fdb-996f-4f63527624f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.036978] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1369.036978] env[63241]: value = "task-1819898" [ 1369.036978] env[63241]: _type = "Task" [ 1369.036978] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.051389] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819898, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.055016] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077281} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.055336] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1369.056362] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85293d8-d4ea-43c2-9da4-5b7d9899a6fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.068536] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819896, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.088417] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 99eccbef-0e76-4532-af2f-5d74e563e1d2/99eccbef-0e76-4532-af2f-5d74e563e1d2.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1369.088993] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dac3e0b0-9c65-4a6e-b12e-941d6a0fc9eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.105609] env[63241]: DEBUG nova.compute.manager [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1369.113662] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1369.113662] env[63241]: value = "task-1819899" [ 1369.113662] env[63241]: _type = "Task" [ 1369.113662] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.126300] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819899, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.151860] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1369.152210] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1369.152421] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1369.152643] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1369.152822] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1369.153032] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1369.153286] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1369.153490] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd 
tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1369.153724] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1369.153933] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1369.154161] env[63241]: DEBUG nova.virt.hardware [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1369.155118] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d1dc8f-7482-46d5-9ff3-a58d2d12759b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.164768] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a670300c-5e0d-4101-aabd-9df6711bf630 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.205239] env[63241]: DEBUG nova.scheduler.client.report [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1369.270585] env[63241]: DEBUG nova.network.neutron [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Successfully updated port: 1e0eeec7-9caf-4069-8cad-d1d0d038ea2b {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1369.292780] env[63241]: DEBUG nova.compute.manager [req-4959a215-ef61-4226-8c74-7c3052134e01 req-09b877f8-5a5b-4eb9-a115-fcf6dc22bde4 service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Received event network-changed-279c7c67-cf23-442a-accf-544adeda8d12 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1369.293100] env[63241]: DEBUG nova.compute.manager [req-4959a215-ef61-4226-8c74-7c3052134e01 
req-09b877f8-5a5b-4eb9-a115-fcf6dc22bde4 service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Refreshing instance network info cache due to event network-changed-279c7c67-cf23-442a-accf-544adeda8d12. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1369.293419] env[63241]: DEBUG oslo_concurrency.lockutils [req-4959a215-ef61-4226-8c74-7c3052134e01 req-09b877f8-5a5b-4eb9-a115-fcf6dc22bde4 service nova] Acquiring lock "refresh_cache-a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.293638] env[63241]: DEBUG oslo_concurrency.lockutils [req-4959a215-ef61-4226-8c74-7c3052134e01 req-09b877f8-5a5b-4eb9-a115-fcf6dc22bde4 service nova] Acquired lock "refresh_cache-a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.294221] env[63241]: DEBUG nova.network.neutron [req-4959a215-ef61-4226-8c74-7c3052134e01 req-09b877f8-5a5b-4eb9-a115-fcf6dc22bde4 service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Refreshing network info cache for port 279c7c67-cf23-442a-accf-544adeda8d12 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1369.316102] env[63241]: DEBUG nova.network.neutron [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Updated VIF entry in instance network info cache for port c5990c8b-c34c-4221-9e72-567817e9637d. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1369.316467] env[63241]: DEBUG nova.network.neutron [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Updating instance_info_cache with network_info: [{"id": "c5990c8b-c34c-4221-9e72-567817e9637d", "address": "fa:16:3e:80:62:bf", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5990c8b-c3", "ovs_interfaceid": "c5990c8b-c34c-4221-9e72-567817e9637d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.397226] env[63241]: INFO nova.compute.manager [-] [instance: b4182e53-50db-4256-b376-b00100778935] Took 1.05 seconds to deallocate network for instance. 
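The "Acquiring lock" / "Acquired lock" / "Releasing lock" messages around the refresh_cache-* entries above are emitted by oslo.concurrency's lockutils while an instance's network info cache is refreshed. A minimal sketch of that locking pattern, assuming a hypothetical refresh_network_cache() helper (the helper and its refresh_fn argument are illustrative, not Nova's actual code):

from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid, refresh_fn):
    # lockutils.lock() logs the "Acquiring lock" / "Acquired lock" /
    # "Releasing lock" DEBUG lines seen above for the named lock.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # Only one worker refreshes a given instance's network info
        # cache at a time; others block until the lock is released.
        return refresh_fn(instance_uuid)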
[ 1369.441462] env[63241]: DEBUG oslo_vmware.api [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819897, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.549315] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819898, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.570559] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819896, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.735795} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.570559] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a88ba00d-6644-4ecc-8603-a7d79ce8a4b4/a88ba00d-6644-4ecc-8603-a7d79ce8a4b4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1369.570559] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1369.570559] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76bf5979-9226-478b-bdeb-dcbc7974683a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.578926] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1369.578926] env[63241]: value = "task-1819900" [ 1369.578926] env[63241]: _type = "Task" [ 1369.578926] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.585388] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819900, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.629254] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819899, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.649992] env[63241]: DEBUG nova.network.neutron [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Successfully updated port: cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1369.710826] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.651s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.711377] env[63241]: DEBUG nova.compute.manager [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1369.714058] env[63241]: DEBUG oslo_concurrency.lockutils [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.139s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.714244] env[63241]: DEBUG nova.objects.instance [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Lazy-loading 'resources' on Instance uuid bbb94f08-7df2-457e-bc5b-d0008839cf20 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1369.774607] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquiring lock "refresh_cache-3c51d4dc-5a2c-4483-9aa5-8bab532971d4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.775132] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquired lock "refresh_cache-3c51d4dc-5a2c-4483-9aa5-8bab532971d4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.775132] env[63241]: DEBUG nova.network.neutron [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1369.819733] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Releasing lock "refresh_cache-99eccbef-0e76-4532-af2f-5d74e563e1d2" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1369.820069] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Received event network-changed-61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1369.820242] env[63241]: DEBUG nova.compute.manager [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Refreshing instance network info cache due to event network-changed-61bceac0-2e58-4bc3-92f6-c421aabdfc8b. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1369.820498] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Acquiring lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.820659] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Acquired lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.820851] env[63241]: DEBUG nova.network.neutron [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Refreshing network info cache for port 61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1369.904266] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.906271] env[63241]: DEBUG nova.compute.manager [req-2addde78-f4ec-4d9e-ab96-b10c3b8891e2 req-efbd7b52-8513-4e35-b79a-571cfc845fb7 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Received event network-changed-61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1369.906631] env[63241]: DEBUG nova.compute.manager [req-2addde78-f4ec-4d9e-ab96-b10c3b8891e2 req-efbd7b52-8513-4e35-b79a-571cfc845fb7 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Refreshing instance network info cache due to event network-changed-61bceac0-2e58-4bc3-92f6-c421aabdfc8b. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1369.907626] env[63241]: DEBUG oslo_concurrency.lockutils [req-2addde78-f4ec-4d9e-ab96-b10c3b8891e2 req-efbd7b52-8513-4e35-b79a-571cfc845fb7 service nova] Acquiring lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.949617] env[63241]: DEBUG oslo_vmware.api [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819897, 'name': PowerOffVM_Task, 'duration_secs': 0.574279} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.950393] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1369.951933] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1369.951933] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e88b7202-5eda-4a4d-b180-ad4bbac73e8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.047816] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1370.048050] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1370.048234] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Deleting the datastore file [datastore1] 97890eda-0c1d-4423-acd2-60d3097c6f8a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1370.048492] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24e771ff-a088-4d32-818b-c993c323b13c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.057528] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819898, 'name': Rename_Task, 'duration_secs': 0.844707} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.058988] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1370.059398] env[63241]: DEBUG oslo_vmware.api [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for the task: (returnval){ [ 1370.059398] env[63241]: value = "task-1819902" [ 1370.059398] env[63241]: _type = "Task" [ 1370.059398] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.059398] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-093fce78-ee45-4e34-a25f-249e56ab9ae0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.074229] env[63241]: DEBUG oslo_vmware.api [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.074229] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1370.074229] env[63241]: value = "task-1819903" [ 1370.074229] env[63241]: _type = "Task" [ 1370.074229] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.090470] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819903, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.096841] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819900, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071282} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.096841] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1370.099325] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa9c3f5-85d1-4156-ada7-09a8c9fbc83d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.131795] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] a88ba00d-6644-4ecc-8603-a7d79ce8a4b4/a88ba00d-6644-4ecc-8603-a7d79ce8a4b4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1370.134607] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d01af0c-5e3e-4cf2-82ba-134de63f06e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.152564] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquiring lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.152564] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquired lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.152848] env[63241]: DEBUG nova.network.neutron [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1370.162805] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819899, 'name': ReconfigVM_Task, 'duration_secs': 0.927667} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.165309] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 99eccbef-0e76-4532-af2f-5d74e563e1d2/99eccbef-0e76-4532-af2f-5d74e563e1d2.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1370.165399] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1370.165399] env[63241]: value = "task-1819904" [ 1370.165399] env[63241]: _type = "Task" [ 1370.165399] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.166422] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72e199bd-815b-488d-bf76-7e552e7ed180 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.183044] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819904, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.186941] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1370.186941] env[63241]: value = "task-1819905" [ 1370.186941] env[63241]: _type = "Task" [ 1370.186941] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.195646] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819905, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.219125] env[63241]: DEBUG nova.compute.utils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1370.222471] env[63241]: DEBUG nova.compute.manager [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1370.223147] env[63241]: DEBUG nova.network.neutron [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1370.257156] env[63241]: DEBUG nova.network.neutron [req-4959a215-ef61-4226-8c74-7c3052134e01 req-09b877f8-5a5b-4eb9-a115-fcf6dc22bde4 service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Updated VIF entry in instance network info cache for port 279c7c67-cf23-442a-accf-544adeda8d12. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1370.257519] env[63241]: DEBUG nova.network.neutron [req-4959a215-ef61-4226-8c74-7c3052134e01 req-09b877f8-5a5b-4eb9-a115-fcf6dc22bde4 service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Updating instance_info_cache with network_info: [{"id": "279c7c67-cf23-442a-accf-544adeda8d12", "address": "fa:16:3e:b8:9d:da", "network": {"id": "3adeb608-f93c-4422-b870-1d1726c199e2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1887376543-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5925758ee5404bbba0f8c9678fcd1eef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap279c7c67-cf", "ovs_interfaceid": "279c7c67-cf23-442a-accf-544adeda8d12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.342364] env[63241]: DEBUG nova.network.neutron [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1370.364762] env[63241]: DEBUG nova.policy [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e31706e8cd1a4bf790f23c31c71298a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b8ae8ca872a549a6918b0e060d4b4af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1370.564503] env[63241]: DEBUG nova.network.neutron [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Updating instance_info_cache with network_info: [{"id": "1e0eeec7-9caf-4069-8cad-d1d0d038ea2b", "address": "fa:16:3e:e4:1a:ee", "network": {"id": "1cb2bfac-6339-4368-9554-14333bff6b76", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-149124339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfb1bfc0a12b45cf84d18d038c94fe3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0eeec7-9c", "ovs_interfaceid": "1e0eeec7-9caf-4069-8cad-d1d0d038ea2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.581919] env[63241]: DEBUG oslo_vmware.api [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Task: {'id': task-1819902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.342524} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.583403] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1370.583640] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1370.584141] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1370.584141] env[63241]: INFO nova.compute.manager [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1370.584305] env[63241]: DEBUG oslo.service.loopingcall [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1370.584723] env[63241]: DEBUG nova.compute.manager [-] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1370.584833] env[63241]: DEBUG nova.network.neutron [-] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1370.589767] env[63241]: DEBUG oslo_vmware.api [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1819903, 'name': PowerOnVM_Task, 'duration_secs': 0.518137} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.592597] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1370.592838] env[63241]: INFO nova.compute.manager [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Took 12.61 seconds to spawn the instance on the hypervisor. 
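The ReconfigVM_Task, ExtendVirtualDisk_Task and PowerOnVM_Task entries above all follow the same invoke-then-poll pattern from oslo.vmware: a *_Task method is invoked, the returned Task reference is polled (the "_poll_task ... progress is N%" lines) until it finishes, and wait_for_task() returns the task info or raises on failure. A minimal sketch of that pattern, assuming placeholder vCenter credentials and VM managed-object ID (none of these values come from this log):

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder endpoint and credentials; constructing the session logs in.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'administrator', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Placeholder managed-object ID of an existing VM.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# invoke_api() returns the Task moref; wait_for_task() polls it the way the
# "_poll_task" lines show, returning the TaskInfo on success and raising an
# oslo_vmware exception if the task fails.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)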
[ 1370.592971] env[63241]: DEBUG nova.compute.manager [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1370.596029] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9055a83-b2d4-4067-b3d8-0ecbb200db08 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.682048] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819904, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.698415] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819905, 'name': Rename_Task, 'duration_secs': 0.171718} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.698602] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1370.698690] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00511845-d48e-45f3-adc1-62f4c730daba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.705447] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1370.705447] env[63241]: value = "task-1819906" [ 1370.705447] env[63241]: _type = "Task" [ 1370.705447] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.718506] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819906, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.719588] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e27c7bf-d330-412d-86a3-d20216d2e5db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.722797] env[63241]: DEBUG nova.compute.manager [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1370.729971] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d7d528-af5c-4b1a-95aa-effadba325e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.776287] env[63241]: DEBUG oslo_concurrency.lockutils [req-4959a215-ef61-4226-8c74-7c3052134e01 req-09b877f8-5a5b-4eb9-a115-fcf6dc22bde4 service nova] Releasing lock "refresh_cache-a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.778113] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17533d6b-cbb6-4c9a-9729-ec94f2b44468 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.781841] env[63241]: DEBUG nova.network.neutron [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1370.793994] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f45e45-3821-4b0e-9891-3d91a211af33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.810726] env[63241]: DEBUG nova.compute.provider_tree [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1371.039701] env[63241]: DEBUG nova.network.neutron [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Updating instance_info_cache with network_info: [{"id": "cd1c9913-c7c6-4258-9006-ee7987594482", "address": "fa:16:3e:93:7c:04", "network": {"id": "d2d9d4dd-5702-4a7f-a301-7f4d520edfbf", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1366579599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b98d078fef845cf87f6d932885790e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd1c9913-c7", "ovs_interfaceid": "cd1c9913-c7c6-4258-9006-ee7987594482", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.073023] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Releasing lock "refresh_cache-3c51d4dc-5a2c-4483-9aa5-8bab532971d4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.073379] env[63241]: DEBUG nova.compute.manager [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Instance network_info: |[{"id": "1e0eeec7-9caf-4069-8cad-d1d0d038ea2b", "address": "fa:16:3e:e4:1a:ee", "network": {"id": "1cb2bfac-6339-4368-9554-14333bff6b76", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-149124339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfb1bfc0a12b45cf84d18d038c94fe3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0eeec7-9c", "ovs_interfaceid": "1e0eeec7-9caf-4069-8cad-d1d0d038ea2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1371.075811] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:1a:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e0eeec7-9caf-4069-8cad-d1d0d038ea2b', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1371.084104] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Creating folder: Project (bfb1bfc0a12b45cf84d18d038c94fe3a). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1371.084737] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-989cb8a7-594e-45f7-85a3-eac73631f8ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.097866] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Created folder: Project (bfb1bfc0a12b45cf84d18d038c94fe3a) in parent group-v376927. [ 1371.098088] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Creating folder: Instances. Parent ref: group-v376971. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1371.098334] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ee93c9d-df7e-4c90-ac6a-fc90069f3b16 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.114629] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Created folder: Instances in parent group-v376971. [ 1371.117841] env[63241]: DEBUG oslo.service.loopingcall [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1371.120326] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1371.120753] env[63241]: INFO nova.compute.manager [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Took 29.01 seconds to build instance. [ 1371.121630] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8deef40e-bece-45ef-9fd7-d93fe44c4466 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.144704] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ce3b74-3c6c-4251-b3cb-d6bc5f4c6b2a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "44508cc6-c576-4c30-8559-75118ceba02a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 30.045s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.151288] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1371.151288] env[63241]: value = "task-1819909" [ 1371.151288] env[63241]: _type = "Task" [ 1371.151288] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.164981] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819909, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.183137] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819904, 'name': ReconfigVM_Task, 'duration_secs': 0.637657} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.183674] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Reconfigured VM instance instance-0000000d to attach disk [datastore1] a88ba00d-6644-4ecc-8603-a7d79ce8a4b4/a88ba00d-6644-4ecc-8603-a7d79ce8a4b4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1371.184115] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d7a9f2d-140a-4e73-9a32-55e1d65aeca6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.193137] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1371.193137] env[63241]: value = "task-1819910" [ 1371.193137] env[63241]: _type = "Task" [ 1371.193137] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.202234] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819910, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.210770] env[63241]: DEBUG nova.network.neutron [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Updated VIF entry in instance network info cache for port 61bceac0-2e58-4bc3-92f6-c421aabdfc8b. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1371.211188] env[63241]: DEBUG nova.network.neutron [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Updating instance_info_cache with network_info: [{"id": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "address": "fa:16:3e:22:0e:78", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61bceac0-2e", "ovs_interfaceid": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.221475] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819906, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.314536] env[63241]: DEBUG nova.scheduler.client.report [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1371.532970] env[63241]: DEBUG nova.network.neutron [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Successfully created port: 4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1371.543117] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Releasing lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.543707] env[63241]: DEBUG nova.compute.manager [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Instance network_info: |[{"id": "cd1c9913-c7c6-4258-9006-ee7987594482", "address": "fa:16:3e:93:7c:04", "network": {"id": "d2d9d4dd-5702-4a7f-a301-7f4d520edfbf", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1366579599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b98d078fef845cf87f6d932885790e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd1c9913-c7", "ovs_interfaceid": "cd1c9913-c7c6-4258-9006-ee7987594482", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1371.544208] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: 
eb506425-4ecc-44b7-afa4-0901fc60b04f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:7c:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c791d09c-1086-4ee1-bcde-6ca7d259cabd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd1c9913-c7c6-4258-9006-ee7987594482', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1371.553978] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Creating folder: Project (9b98d078fef845cf87f6d932885790e1). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1371.555625] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a2244da-5ab2-4727-9cce-bbe5efb20b78 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.566611] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Created folder: Project (9b98d078fef845cf87f6d932885790e1) in parent group-v376927. [ 1371.566611] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Creating folder: Instances. Parent ref: group-v376974. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1371.567709] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-883b5f9f-48c9-4aaf-9ac9-31af53b39cef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.578414] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Created folder: Instances in parent group-v376974. [ 1371.578539] env[63241]: DEBUG oslo.service.loopingcall [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1371.578728] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1371.579498] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d064308-6d7c-4690-98e3-bc7536321344 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.604789] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1371.604789] env[63241]: value = "task-1819913" [ 1371.604789] env[63241]: _type = "Task" [ 1371.604789] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.617028] env[63241]: DEBUG nova.network.neutron [-] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.618533] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819913, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.646781] env[63241]: DEBUG nova.compute.manager [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1371.665938] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819909, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.705643] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819910, 'name': Rename_Task, 'duration_secs': 0.153168} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.705887] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1371.706126] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-091f0ea4-613a-4180-8d9d-23754e59071f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.717456] env[63241]: DEBUG oslo_concurrency.lockutils [req-6cf30618-5274-4a83-a32e-92af3a50280c req-4b39fec3-e76c-4327-b043-36b0be8917d1 service nova] Releasing lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.717941] env[63241]: DEBUG oslo_vmware.api [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819906, 'name': PowerOnVM_Task, 'duration_secs': 0.685974} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.719424] env[63241]: DEBUG oslo_concurrency.lockutils [req-2addde78-f4ec-4d9e-ab96-b10c3b8891e2 req-efbd7b52-8513-4e35-b79a-571cfc845fb7 service nova] Acquired lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.719654] env[63241]: DEBUG nova.network.neutron [req-2addde78-f4ec-4d9e-ab96-b10c3b8891e2 req-efbd7b52-8513-4e35-b79a-571cfc845fb7 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Refreshing network info cache for port 61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1371.720635] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1371.720921] env[63241]: INFO nova.compute.manager [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Took 11.27 seconds to spawn the instance on the hypervisor. [ 1371.721037] env[63241]: DEBUG nova.compute.manager [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1371.721355] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1371.721355] env[63241]: value = "task-1819914" [ 1371.721355] env[63241]: _type = "Task" [ 1371.721355] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.722497] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f6830b-773b-4278-85da-1656bb3cf343 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.732757] env[63241]: DEBUG nova.compute.manager [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1371.742666] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819914, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.784071] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1371.784348] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1371.784845] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1371.784845] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1371.784845] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1371.784953] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1371.785442] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1371.785504] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1371.786180] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1371.786180] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1371.786180] env[63241]: DEBUG nova.virt.hardware [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1371.787175] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f207255-9596-4498-b7c0-a80d064b8d70 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.799615] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b1fe4c-7148-46ee-8b7a-f74a3f204385 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.822461] env[63241]: DEBUG oslo_concurrency.lockutils [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.826054] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.822s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.827758] env[63241]: INFO nova.compute.claims [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1371.862404] env[63241]: INFO nova.scheduler.client.report [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Deleted allocations for instance bbb94f08-7df2-457e-bc5b-d0008839cf20 [ 1372.115269] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819913, 'name': CreateVM_Task, 'duration_secs': 0.427788} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.115417] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1372.116352] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.116460] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.117163] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1372.117163] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3fb7e95-9df5-46b3-b667-b5723e1d97ad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.119299] env[63241]: INFO nova.compute.manager [-] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Took 1.53 seconds to deallocate network for instance. [ 1372.125434] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1372.125434] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52dfba1c-e556-611c-4573-61bf60c9870f" [ 1372.125434] env[63241]: _type = "Task" [ 1372.125434] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.135009] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dfba1c-e556-611c-4573-61bf60c9870f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.168575] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819909, 'name': CreateVM_Task, 'duration_secs': 0.54535} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.168784] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1372.169488] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.179186] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.239027] env[63241]: DEBUG oslo_vmware.api [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1819914, 'name': PowerOnVM_Task, 'duration_secs': 0.500596} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.239027] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1372.239027] env[63241]: INFO nova.compute.manager [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Took 9.26 seconds to spawn the instance on the hypervisor. [ 1372.239027] env[63241]: DEBUG nova.compute.manager [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1372.239027] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38da8037-4270-4444-b923-0ecd5ddcd20d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.258369] env[63241]: INFO nova.compute.manager [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Took 26.99 seconds to build instance. 
[ 1372.378290] env[63241]: DEBUG oslo_concurrency.lockutils [None req-360e5ce1-e829-47eb-b06c-8245b54df967 tempest-DeleteServersAdminTestJSON-1444587359 tempest-DeleteServersAdminTestJSON-1444587359-project-admin] Lock "bbb94f08-7df2-457e-bc5b-d0008839cf20" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.341s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.626968] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.639733] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dfba1c-e556-611c-4573-61bf60c9870f, 'name': SearchDatastore_Task, 'duration_secs': 0.013352} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.639733] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.639907] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1372.640164] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.640355] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.640579] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1372.640937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.641423] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1372.641754] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f57b7449-f10a-4997-9c3b-9d3e6a9463b3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.644706] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d2aceda-860b-4bd7-bee5-2f339359210f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.651341] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1372.651341] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52401bb5-d95b-97af-fd27-3d22ea7f99b2" [ 1372.651341] env[63241]: _type = "Task" [ 1372.651341] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.660191] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52401bb5-d95b-97af-fd27-3d22ea7f99b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.661493] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1372.662072] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1372.663172] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fb97ecc-7850-4c5a-babb-c1a155057ee3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.668411] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1372.668411] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521b4675-7592-3390-1637-93fe70df33bb" [ 1372.668411] env[63241]: _type = "Task" [ 1372.668411] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.677777] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521b4675-7592-3390-1637-93fe70df33bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.758177] env[63241]: DEBUG nova.network.neutron [req-2addde78-f4ec-4d9e-ab96-b10c3b8891e2 req-efbd7b52-8513-4e35-b79a-571cfc845fb7 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Updated VIF entry in instance network info cache for port 61bceac0-2e58-4bc3-92f6-c421aabdfc8b. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1372.759653] env[63241]: DEBUG nova.network.neutron [req-2addde78-f4ec-4d9e-ab96-b10c3b8891e2 req-efbd7b52-8513-4e35-b79a-571cfc845fb7 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Updating instance_info_cache with network_info: [{"id": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "address": "fa:16:3e:22:0e:78", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61bceac0-2e", "ovs_interfaceid": "61bceac0-2e58-4bc3-92f6-c421aabdfc8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.767374] env[63241]: DEBUG oslo_concurrency.lockutils [None req-43e76b2b-8f40-4829-9832-98fba7bf0918 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2" 
"released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.510s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.770504] env[63241]: INFO nova.compute.manager [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Took 25.07 seconds to build instance. [ 1372.858060] env[63241]: DEBUG nova.compute.manager [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Received event network-vif-plugged-cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1372.858336] env[63241]: DEBUG oslo_concurrency.lockutils [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] Acquiring lock "eb506425-4ecc-44b7-afa4-0901fc60b04f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.858584] env[63241]: DEBUG oslo_concurrency.lockutils [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] Lock "eb506425-4ecc-44b7-afa4-0901fc60b04f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.858838] env[63241]: DEBUG oslo_concurrency.lockutils [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] Lock "eb506425-4ecc-44b7-afa4-0901fc60b04f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.858904] env[63241]: DEBUG nova.compute.manager [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] No waiting events found dispatching network-vif-plugged-cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1372.859047] env[63241]: WARNING nova.compute.manager [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Received unexpected event network-vif-plugged-cd1c9913-c7c6-4258-9006-ee7987594482 for instance with vm_state building and task_state spawning. [ 1372.859221] env[63241]: DEBUG nova.compute.manager [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Received event network-changed-cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1372.859375] env[63241]: DEBUG nova.compute.manager [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Refreshing instance network info cache due to event network-changed-cd1c9913-c7c6-4258-9006-ee7987594482. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1372.859563] env[63241]: DEBUG oslo_concurrency.lockutils [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] Acquiring lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.859698] env[63241]: DEBUG oslo_concurrency.lockutils [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] Acquired lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.859852] env[63241]: DEBUG nova.network.neutron [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Refreshing network info cache for port cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1373.118901] env[63241]: DEBUG nova.compute.manager [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Received event network-vif-plugged-1e0eeec7-9caf-4069-8cad-d1d0d038ea2b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1373.119139] env[63241]: DEBUG oslo_concurrency.lockutils [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] Acquiring lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.119368] env[63241]: DEBUG oslo_concurrency.lockutils [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] Lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.119546] env[63241]: DEBUG oslo_concurrency.lockutils [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] Lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.119706] env[63241]: DEBUG nova.compute.manager [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] No waiting events found dispatching network-vif-plugged-1e0eeec7-9caf-4069-8cad-d1d0d038ea2b {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1373.119887] env[63241]: WARNING nova.compute.manager [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Received unexpected event network-vif-plugged-1e0eeec7-9caf-4069-8cad-d1d0d038ea2b for instance with vm_state building and task_state spawning. 
[ 1373.120622] env[63241]: DEBUG nova.compute.manager [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Received event network-changed-1e0eeec7-9caf-4069-8cad-d1d0d038ea2b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1373.120622] env[63241]: DEBUG nova.compute.manager [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Refreshing instance network info cache due to event network-changed-1e0eeec7-9caf-4069-8cad-d1d0d038ea2b. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1373.120767] env[63241]: DEBUG oslo_concurrency.lockutils [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] Acquiring lock "refresh_cache-3c51d4dc-5a2c-4483-9aa5-8bab532971d4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1373.120886] env[63241]: DEBUG oslo_concurrency.lockutils [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] Acquired lock "refresh_cache-3c51d4dc-5a2c-4483-9aa5-8bab532971d4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.121064] env[63241]: DEBUG nova.network.neutron [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Refreshing network info cache for port 1e0eeec7-9caf-4069-8cad-d1d0d038ea2b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1373.166734] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52401bb5-d95b-97af-fd27-3d22ea7f99b2, 'name': SearchDatastore_Task, 'duration_secs': 0.026117} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.170981] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.170981] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1373.170981] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1373.180487] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521b4675-7592-3390-1637-93fe70df33bb, 'name': SearchDatastore_Task, 'duration_secs': 0.022709} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.184696] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afa7ee16-134e-46ce-924e-217869be212e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.190508] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1373.190508] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52649281-1eb1-affc-4f22-9f90c759fe96" [ 1373.190508] env[63241]: _type = "Task" [ 1373.190508] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.205826] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52649281-1eb1-affc-4f22-9f90c759fe96, 'name': SearchDatastore_Task, 'duration_secs': 0.009796} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.206046] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.206305] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] eb506425-4ecc-44b7-afa4-0901fc60b04f/eb506425-4ecc-44b7-afa4-0901fc60b04f.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1373.206597] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.206842] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1373.207059] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29ea59f8-2814-4b1e-938e-c000fa36e9dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.212840] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-975cf7e1-f7f5-401a-a910-42ed8fcb7265 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.224914] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1373.224914] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1373.227671] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e21aed62-f57b-4daf-ad3e-92a623b83c53 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.229957] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1373.229957] env[63241]: value = "task-1819915" [ 1373.229957] env[63241]: _type = "Task" [ 1373.229957] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.239524] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1373.239524] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52fbeb97-4e06-7da3-98f1-4f6a10af1241" [ 1373.239524] env[63241]: _type = "Task" [ 1373.239524] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.247185] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819915, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.253066] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fbeb97-4e06-7da3-98f1-4f6a10af1241, 'name': SearchDatastore_Task, 'duration_secs': 0.009285} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.257197] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd6b859-8b3f-4c26-9695-0db938425fdd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.259541] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6a736a3-f105-4589-923d-87baf1ab1120 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.269606] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369b98c2-7d5b-4b07-9636-f23cff2f9efb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.271415] env[63241]: DEBUG oslo_concurrency.lockutils [req-2addde78-f4ec-4d9e-ab96-b10c3b8891e2 req-efbd7b52-8513-4e35-b79a-571cfc845fb7 service nova] Releasing lock "refresh_cache-a1f24cfe-88f0-4e73-9ade-2dcf907848a1" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.273676] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1373.273676] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5210ff00-3d26-e54a-5916-2718ec12d55a" [ 1373.273676] env[63241]: _type = "Task" [ 1373.273676] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.273676] env[63241]: DEBUG nova.compute.manager [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1373.275250] env[63241]: DEBUG oslo_concurrency.lockutils [None req-32764a7c-7de9-4c55-8d09-103670193a07 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.584s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.318345] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b6a386-b79d-4f2d-b2b9-69550a275e37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.325011] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5210ff00-3d26-e54a-5916-2718ec12d55a, 'name': SearchDatastore_Task, 'duration_secs': 0.010171} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.325723] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.326037] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 3c51d4dc-5a2c-4483-9aa5-8bab532971d4/3c51d4dc-5a2c-4483-9aa5-8bab532971d4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1373.326893] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e45079d-5845-4f1f-a1f1-c9ef56a703cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.332549] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed6b518-49a8-4ac7-9b4f-f96b631d22fc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.338923] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1373.338923] env[63241]: value = "task-1819916" [ 1373.338923] env[63241]: _type = "Task" [ 1373.338923] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.352666] env[63241]: DEBUG nova.compute.provider_tree [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.361050] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819916, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.745396] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819915, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470627} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.748399] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] eb506425-4ecc-44b7-afa4-0901fc60b04f/eb506425-4ecc-44b7-afa4-0901fc60b04f.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1373.748931] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1373.748931] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-009388cf-5b04-447a-8287-d2c6ac445d13 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.756158] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1373.756158] env[63241]: value = "task-1819917" [ 1373.756158] env[63241]: _type = "Task" [ 1373.756158] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.766612] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819917, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.816839] env[63241]: DEBUG nova.compute.manager [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1373.824567] env[63241]: DEBUG nova.network.neutron [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Updated VIF entry in instance network info cache for port cd1c9913-c7c6-4258-9006-ee7987594482. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.825253] env[63241]: DEBUG nova.network.neutron [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Updating instance_info_cache with network_info: [{"id": "cd1c9913-c7c6-4258-9006-ee7987594482", "address": "fa:16:3e:93:7c:04", "network": {"id": "d2d9d4dd-5702-4a7f-a301-7f4d520edfbf", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1366579599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b98d078fef845cf87f6d932885790e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd1c9913-c7", "ovs_interfaceid": "cd1c9913-c7c6-4258-9006-ee7987594482", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.849048] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.858688] env[63241]: DEBUG nova.scheduler.client.report [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1373.863248] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819916, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.907242] env[63241]: DEBUG nova.network.neutron [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Updated VIF entry in instance network info cache for port 1e0eeec7-9caf-4069-8cad-d1d0d038ea2b. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.907610] env[63241]: DEBUG nova.network.neutron [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Updating instance_info_cache with network_info: [{"id": "1e0eeec7-9caf-4069-8cad-d1d0d038ea2b", "address": "fa:16:3e:e4:1a:ee", "network": {"id": "1cb2bfac-6339-4368-9554-14333bff6b76", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-149124339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfb1bfc0a12b45cf84d18d038c94fe3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0eeec7-9c", "ovs_interfaceid": "1e0eeec7-9caf-4069-8cad-d1d0d038ea2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1374.216649] env[63241]: DEBUG nova.network.neutron [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Successfully updated port: 4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1374.270095] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819917, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079826} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.270578] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1374.272558] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0743fe4-ce73-49a6-be20-2d7c7cea21f8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.299613] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] eb506425-4ecc-44b7-afa4-0901fc60b04f/eb506425-4ecc-44b7-afa4-0901fc60b04f.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1374.299983] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31c1db56-41d3-4f36-9cac-c7c6fd4f68a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.327527] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1374.327527] env[63241]: value = "task-1819918" [ 1374.327527] env[63241]: _type = "Task" [ 1374.327527] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.330156] env[63241]: DEBUG oslo_concurrency.lockutils [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] Releasing lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.330378] env[63241]: DEBUG nova.compute.manager [req-ee4b0009-2b29-4b46-b801-b552fb2f5f59 req-3703de81-631b-4356-9d5a-d2d65c8926a8 service nova] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Received event network-vif-deleted-4388c54d-69af-4eb2-8f0b-e40773bf2e95 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1374.342349] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819918, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.352360] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819916, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.772264} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.352471] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 3c51d4dc-5a2c-4483-9aa5-8bab532971d4/3c51d4dc-5a2c-4483-9aa5-8bab532971d4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1374.352713] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1374.353727] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e98808b0-0eef-4d90-9510-ebbd82d8ecf6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.356248] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.361026] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1374.361026] env[63241]: value = "task-1819919" [ 1374.361026] env[63241]: _type = "Task" [ 1374.361026] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.364897] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.365357] env[63241]: DEBUG nova.compute.manager [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1374.369025] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.764s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.370906] env[63241]: INFO nova.compute.claims [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1374.379220] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819919, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.411489] env[63241]: DEBUG oslo_concurrency.lockutils [req-c0fe54a5-6b1b-4025-b744-ea151d7f6001 req-bd9c09f7-978f-45b2-988c-bf4206aeebd1 service nova] Releasing lock "refresh_cache-3c51d4dc-5a2c-4483-9aa5-8bab532971d4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.434339] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquiring lock "40217405-dcba-48cf-9d92-4122390d9fa8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.434644] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "40217405-dcba-48cf-9d92-4122390d9fa8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.468023] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "99eccbef-0e76-4532-af2f-5d74e563e1d2" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.468023] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.468023] env[63241]: DEBUG nova.compute.manager [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 
99eccbef-0e76-4532-af2f-5d74e563e1d2] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1374.468023] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56af9647-7c5b-4ba6-acc4-39239d6aadb6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.474886] env[63241]: DEBUG nova.compute.manager [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1374.475487] env[63241]: DEBUG nova.objects.instance [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lazy-loading 'flavor' on Instance uuid 99eccbef-0e76-4532-af2f-5d74e563e1d2 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1374.720154] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.720343] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquired lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.720465] env[63241]: DEBUG nova.network.neutron [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1374.842822] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819918, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.872371] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819919, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07447} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.872652] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1374.873716] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657d5efe-e306-4d53-affc-6e3f8aace7a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.878266] env[63241]: DEBUG nova.compute.utils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1374.882299] env[63241]: DEBUG nova.compute.manager [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1374.882476] env[63241]: DEBUG nova.network.neutron [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1374.907542] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 3c51d4dc-5a2c-4483-9aa5-8bab532971d4/3c51d4dc-5a2c-4483-9aa5-8bab532971d4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1374.909795] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8619743-3df6-4472-87cc-310beae1f5da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.927095] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.927323] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1374.932968] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1374.932968] env[63241]: value = "task-1819920" [ 1374.932968] env[63241]: _type = "Task" [ 1374.932968] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.943668] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819920, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.981416] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1374.982358] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b14d086a-a77e-4cc7-a06d-86a572326b26 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.990156] env[63241]: DEBUG oslo_vmware.api [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1374.990156] env[63241]: value = "task-1819921" [ 1374.990156] env[63241]: _type = "Task" [ 1374.990156] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.002062] env[63241]: DEBUG oslo_vmware.api [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819921, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.027939] env[63241]: DEBUG nova.policy [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df510c48a0bd4417a70a13257a6eacc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6300a112f8f749e9a26007bb80dee152', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1375.276670] env[63241]: DEBUG nova.network.neutron [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1375.341412] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819918, 'name': ReconfigVM_Task, 'duration_secs': 0.635316} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.341412] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Reconfigured VM instance instance-0000000f to attach disk [datastore1] eb506425-4ecc-44b7-afa4-0901fc60b04f/eb506425-4ecc-44b7-afa4-0901fc60b04f.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1375.341966] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-646a91e0-6412-477b-91fa-3fbd1fd73831 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.349040] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1375.349040] env[63241]: value = "task-1819922" [ 1375.349040] env[63241]: _type = "Task" [ 1375.349040] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.360746] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819922, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.384376] env[63241]: DEBUG nova.compute.manager [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1375.445238] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819920, 'name': ReconfigVM_Task, 'duration_secs': 0.296319} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.445462] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 3c51d4dc-5a2c-4483-9aa5-8bab532971d4/3c51d4dc-5a2c-4483-9aa5-8bab532971d4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1375.447887] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-448d0ee9-b8fb-4d85-983b-c54d6c1e2e5f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.458508] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1375.458508] env[63241]: value = "task-1819923" [ 1375.458508] env[63241]: _type = "Task" [ 1375.458508] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.469868] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819923, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.506687] env[63241]: DEBUG oslo_vmware.api [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819921, 'name': PowerOffVM_Task, 'duration_secs': 0.397814} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.507045] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1375.507210] env[63241]: DEBUG nova.compute.manager [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1375.508061] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b7573f-64d2-4ed0-9989-5d35eff9469c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.545924] env[63241]: DEBUG nova.network.neutron [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updating instance_info_cache with network_info: [{"id": "4e927ec6-c091-40ea-8276-55eb762b414d", "address": "fa:16:3e:13:6a:ad", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e927ec6-c0", "ovs_interfaceid": "4e927ec6-c091-40ea-8276-55eb762b414d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.792422] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "27177719-5090-43de-9bca-6db6bebab7b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.792999] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "27177719-5090-43de-9bca-6db6bebab7b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.860422] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819922, 'name': Rename_Task, 'duration_secs': 0.303061} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.862909] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1375.863722] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08c171e3-1f26-4354-a560-89ab9648f2c6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.870520] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1375.870520] env[63241]: value = "task-1819924" [ 1375.870520] env[63241]: _type = "Task" [ 1375.870520] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.875959] env[63241]: DEBUG nova.network.neutron [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Successfully created port: f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1375.886426] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819924, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.900352] env[63241]: INFO nova.virt.block_device [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Booting with volume c32b5066-e324-4377-90d0-ef224dd92932 at /dev/sda [ 1375.936594] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0a267a-5587-4cb3-b43c-4b2084e3f17f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.944554] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a7c5c7-2297-4c35-ace6-692050883039 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.981602] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc3b0dd-5a31-4721-aae2-16af851ef895 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.984188] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e18cce45-de56-4960-a52a-709c48e4e660 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.994544] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6fd7dc-ad51-4129-91c0-ebc4175c201c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.998763] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819923, 'name': Rename_Task, 'duration_secs': 0.15263} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.001580] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e78e037-39c6-4eb9-9245-1bf234c847f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.019543] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1376.019543] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-868a5bc5-3a82-471b-87c8-fbaa51ff0008 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.032017] env[63241]: DEBUG nova.compute.provider_tree [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.033579] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3aaf5e-0da5-4fe5-9a4e-849f46c5c57a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.569s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.045621] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b81ae510-19e8-4f3b-b130-5d366f96f93a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.047683] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1376.047683] env[63241]: value = "task-1819925" [ 1376.047683] env[63241]: _type = "Task" [ 1376.047683] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.054095] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Releasing lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.054574] env[63241]: DEBUG nova.compute.manager [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Instance network_info: |[{"id": "4e927ec6-c091-40ea-8276-55eb762b414d", "address": "fa:16:3e:13:6a:ad", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e927ec6-c0", "ovs_interfaceid": "4e927ec6-c091-40ea-8276-55eb762b414d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1376.056068] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:6a:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3d7e184-c87f-47a5-8d0d-9fa20e07e669', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e927ec6-c091-40ea-8276-55eb762b414d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1376.063595] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Creating folder: Project (b8ae8ca872a549a6918b0e060d4b4af0). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1376.066595] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48d9ff7-3706-4043-be62-f671e11d5c95 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.082158] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f51521e-f2b2-4d6c-986d-3028a515de0b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.083307] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819925, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.091784] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Created folder: Project (b8ae8ca872a549a6918b0e060d4b4af0) in parent group-v376927. [ 1376.091925] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Creating folder: Instances. Parent ref: group-v376977. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1376.092189] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b6b5d3f-136e-4fe7-bbe7-350649ef73c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.106084] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9247c9e0-7fe4-4c73-b890-c29d448a16db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.108667] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Created folder: Instances in parent group-v376977. [ 1376.108895] env[63241]: DEBUG oslo.service.loopingcall [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1376.109108] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1376.109726] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3c51947-33ef-4fd2-aa0b-a650d16bf295 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.128093] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82feecdb-5984-4bb9-847b-05e26717440a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.134037] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1376.134037] env[63241]: value = "task-1819928" [ 1376.134037] env[63241]: _type = "Task" [ 1376.134037] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.139477] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819928, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.143567] env[63241]: DEBUG nova.virt.block_device [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Updating existing volume attachment record: ff799f61-fd73-4fc3-91cc-f0c755791acc {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1376.382616] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819924, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.547611] env[63241]: DEBUG nova.scheduler.client.report [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1376.567074] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819925, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.605059] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquiring lock "e2758650-2762-49f6-a678-f55425a89994" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.605454] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "e2758650-2762-49f6-a678-f55425a89994" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.644360] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819928, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.747396] env[63241]: DEBUG nova.compute.manager [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Received event network-vif-plugged-4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1376.747606] env[63241]: DEBUG oslo_concurrency.lockutils [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] Acquiring lock "0c72c98b-57f0-44e5-9159-490b27eac3a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.747806] env[63241]: DEBUG oslo_concurrency.lockutils [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] Lock "0c72c98b-57f0-44e5-9159-490b27eac3a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.747964] env[63241]: DEBUG oslo_concurrency.lockutils [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] Lock "0c72c98b-57f0-44e5-9159-490b27eac3a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.748619] env[63241]: DEBUG nova.compute.manager [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] No waiting events found dispatching network-vif-plugged-4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1376.748619] env[63241]: WARNING nova.compute.manager [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Received unexpected event network-vif-plugged-4e927ec6-c091-40ea-8276-55eb762b414d for instance with vm_state building and task_state 
spawning. [ 1376.748905] env[63241]: DEBUG nova.compute.manager [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Received event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1376.748905] env[63241]: DEBUG nova.compute.manager [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing instance network info cache due to event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1376.749559] env[63241]: DEBUG oslo_concurrency.lockutils [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] Acquiring lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.749559] env[63241]: DEBUG oslo_concurrency.lockutils [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] Acquired lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.749559] env[63241]: DEBUG nova.network.neutron [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1376.887530] env[63241]: DEBUG oslo_vmware.api [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819924, 'name': PowerOnVM_Task, 'duration_secs': 0.627684} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.889807] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1376.894236] env[63241]: INFO nova.compute.manager [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Took 7.78 seconds to spawn the instance on the hypervisor. 
[ 1376.894236] env[63241]: DEBUG nova.compute.manager [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1376.894616] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabbba16-2708-48d2-940f-59871c1653c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.059456] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.059456] env[63241]: DEBUG nova.compute.manager [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1377.062418] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.429s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.062665] env[63241]: DEBUG nova.objects.instance [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lazy-loading 'resources' on Instance uuid 69c73342-258a-4b00-ba1b-ffdd5f247890 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1377.075668] env[63241]: DEBUG oslo_vmware.api [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1819925, 'name': PowerOnVM_Task, 'duration_secs': 0.869263} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.075668] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1377.075668] env[63241]: INFO nova.compute.manager [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Took 10.56 seconds to spawn the instance on the hypervisor. 
[ 1377.075668] env[63241]: DEBUG nova.compute.manager [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1377.075668] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc962a1-85db-4a96-8bf2-626a48b8b88e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.144630] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819928, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.420193] env[63241]: INFO nova.compute.manager [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Took 24.62 seconds to build instance. [ 1377.549919] env[63241]: DEBUG nova.network.neutron [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updated VIF entry in instance network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1377.550318] env[63241]: DEBUG nova.network.neutron [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updating instance_info_cache with network_info: [{"id": "4e927ec6-c091-40ea-8276-55eb762b414d", "address": "fa:16:3e:13:6a:ad", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e927ec6-c0", "ovs_interfaceid": "4e927ec6-c091-40ea-8276-55eb762b414d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.565830] env[63241]: DEBUG nova.compute.utils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1377.567419] env[63241]: DEBUG nova.compute.manager [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 
tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1377.567681] env[63241]: DEBUG nova.network.neutron [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1377.594824] env[63241]: INFO nova.compute.manager [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Took 25.73 seconds to build instance. [ 1377.621278] env[63241]: DEBUG nova.policy [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f52bf5741a490c83e01e06f686559e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c64d07a686b414f93ec4c599307498f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1377.650365] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819928, 'name': CreateVM_Task, 'duration_secs': 1.198721} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.650600] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1377.651169] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.651333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.651805] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1377.652042] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e1d1aa1-7cca-433d-ac38-4ddd2c86eaa2 {{(pid=63241) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.656944] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1377.656944] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f4e695-9353-7058-bbc1-36ae3309fbf4" [ 1377.656944] env[63241]: _type = "Task" [ 1377.656944] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.667182] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f4e695-9353-7058-bbc1-36ae3309fbf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.900163] env[63241]: DEBUG nova.network.neutron [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Successfully updated port: f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1377.922428] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b4cdfd15-dc95-479a-b785-52d0b6bf6acd tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "eb506425-4ecc-44b7-afa4-0901fc60b04f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.648s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.932906] env[63241]: DEBUG nova.network.neutron [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Successfully created port: 003d3fab-f7ce-4892-b925-c2280d3a9ae2 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1378.059993] env[63241]: DEBUG oslo_concurrency.lockutils [req-9c1dd6cd-daff-488b-8375-399e31b84fdd req-7ffd4dc1-aa06-4e39-b6e5-dec5f89de7ce service nova] Releasing lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.075878] env[63241]: DEBUG nova.compute.manager [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1378.095278] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663e708e-971b-47ff-be36-aefd4a5e8909 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.099746] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3fffc130-0f5c-4263-985c-e8dfac60a04a tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.019s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.108228] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f268ef86-36bb-4f50-acbf-e010404821ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.142798] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2f9464-7c89-46ce-8357-288bb9a2256a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.151748] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d88d71-b59a-4639-bae6-08373227cc0e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.168884] env[63241]: DEBUG nova.compute.provider_tree [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1378.181749] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f4e695-9353-7058-bbc1-36ae3309fbf4, 'name': SearchDatastore_Task, 'duration_secs': 0.021291} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.182696] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.182928] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1378.183300] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.183377] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.183619] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1378.184126] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f0129e5-5a89-4918-ad66-69559df2b3b4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.196059] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1378.196125] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1378.196890] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49a282e9-9db3-4bc4-a902-9039bd120c9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.207596] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1378.207596] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b01805-382c-09f0-f306-c1ef15966e12" [ 1378.207596] env[63241]: _type = "Task" [ 1378.207596] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.221025] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b01805-382c-09f0-f306-c1ef15966e12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.402623] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Acquiring lock "refresh_cache-c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.402791] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Acquired lock "refresh_cache-c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.402963] env[63241]: DEBUG nova.network.neutron [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1378.426268] env[63241]: DEBUG nova.compute.manager [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1378.459079] env[63241]: DEBUG nova.compute.manager [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1378.459412] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1378.459689] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1378.459863] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1378.460053] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1378.463353] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1378.463353] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1378.463353] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1378.463353] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1378.463353] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 
tempest-ServersTestBootFromVolume-1074118623-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1378.464123] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1378.464123] env[63241]: DEBUG nova.virt.hardware [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1378.464123] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf1abd0-57ae-4bc5-8bd1-ab9abb924460 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.471394] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94fa9fcb-06f2-458e-87da-919821b48e76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.505954] env[63241]: DEBUG nova.compute.manager [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1378.507219] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e19137-5a90-4c8f-bf11-82cb4980b86f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.603824] env[63241]: DEBUG nova.compute.manager [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1378.675182] env[63241]: DEBUG nova.scheduler.client.report [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1378.723769] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b01805-382c-09f0-f306-c1ef15966e12, 'name': SearchDatastore_Task, 'duration_secs': 0.023787} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.723769] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffe5d351-51d8-4dbf-8fe6-aa4a51f10fd5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.728326] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1378.728326] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52fed3eb-c010-9d48-c88c-0db577bfac30" [ 1378.728326] env[63241]: _type = "Task" [ 1378.728326] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.737214] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fed3eb-c010-9d48-c88c-0db577bfac30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.956037] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.002684] env[63241]: DEBUG nova.network.neutron [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1379.023322] env[63241]: INFO nova.compute.manager [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] instance snapshotting [ 1379.023637] env[63241]: WARNING nova.compute.manager [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1379.026882] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94dac45f-ebd3-4f82-b7a5-5b9f1a96488d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.054483] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e579651c-01ed-42e4-894a-a57b34e8bba2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.085404] env[63241]: DEBUG nova.compute.manager [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1379.114312] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1379.114669] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1379.114889] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1379.115165] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1379.115385] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1379.115634] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1379.115946] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1379.116211] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1379.116485] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1379.116716] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1379.116954] env[63241]: DEBUG nova.virt.hardware [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1379.118645] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ec23d0-6430-44e2-bfd3-5fe50357b6c1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.131885] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d477d9-64df-481a-9558-5465002f055e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.150537] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.181109] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 
tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.118s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.187615] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.959s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.187943] env[63241]: DEBUG nova.objects.instance [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lazy-loading 'resources' on Instance uuid 0440c0a8-f065-4a82-b190-33279e7c0d93 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1379.205631] env[63241]: INFO nova.scheduler.client.report [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Deleted allocations for instance 69c73342-258a-4b00-ba1b-ffdd5f247890 [ 1379.240466] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fed3eb-c010-9d48-c88c-0db577bfac30, 'name': SearchDatastore_Task, 'duration_secs': 0.031135} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.240788] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.242273] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0c72c98b-57f0-44e5-9159-490b27eac3a6/0c72c98b-57f0-44e5-9159-490b27eac3a6.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1379.242273] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f807144-4fbf-4a2a-bba1-136fac9643ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.252052] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1379.252052] env[63241]: value = "task-1819929" [ 1379.252052] env[63241]: _type = "Task" [ 1379.252052] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.261439] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.454008] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.454008] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1379.494940] env[63241]: DEBUG nova.network.neutron [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Updating instance_info_cache with network_info: [{"id": "f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e", "address": "fa:16:3e:3a:c0:84", "network": {"id": "49c2943c-4145-4b92-aa07-343c928ed805", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1351064395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6300a112f8f749e9a26007bb80dee152", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9f1a2e1-b5", "ovs_interfaceid": "f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.568019] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1379.568019] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a21c6d2a-b2b1-4f8c-90a6-4ff6f1179cc0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.576812] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1379.576812] env[63241]: value = "task-1819930" [ 1379.576812] env[63241]: _type = "Task" [ 1379.576812] env[63241]: 
} to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.587628] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819930, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.721024] env[63241]: DEBUG oslo_concurrency.lockutils [None req-38b37074-0007-4ad2-a4dc-9e49dfa7ca56 tempest-ServerDiagnosticsV248Test-1332899730 tempest-ServerDiagnosticsV248Test-1332899730-project-member] Lock "69c73342-258a-4b00-ba1b-ffdd5f247890" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.937s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.774952] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819929, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.819694] env[63241]: DEBUG nova.compute.manager [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Received event network-changed-46d3ef2e-5410-4151-8ec8-30a6f2e5e221 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1379.820758] env[63241]: DEBUG nova.compute.manager [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Refreshing instance network info cache due to event network-changed-46d3ef2e-5410-4151-8ec8-30a6f2e5e221. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1379.820758] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Acquiring lock "refresh_cache-44508cc6-c576-4c30-8559-75118ceba02a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.820758] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Acquired lock "refresh_cache-44508cc6-c576-4c30-8559-75118ceba02a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.820758] env[63241]: DEBUG nova.network.neutron [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Refreshing network info cache for port 46d3ef2e-5410-4151-8ec8-30a6f2e5e221 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1379.959342] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] There are 2 instances to clean {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1379.959854] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: bbb94f08-7df2-457e-bc5b-d0008839cf20] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1379.996679] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Releasing lock "refresh_cache-c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.997376] env[63241]: DEBUG nova.compute.manager [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Instance network_info: |[{"id": "f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e", "address": "fa:16:3e:3a:c0:84", "network": {"id": "49c2943c-4145-4b92-aa07-343c928ed805", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1351064395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6300a112f8f749e9a26007bb80dee152", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9f1a2e1-b5", "ovs_interfaceid": "f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 
1379.997907] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:c0:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bcd9d2d-25c8-41ad-9a4a-93b9029ba993', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1380.012048] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Creating folder: Project (6300a112f8f749e9a26007bb80dee152). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.012417] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-722040f6-ffc3-40d7-bda3-10b7c394a831 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.028430] env[63241]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1380.028631] env[63241]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63241) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1380.029338] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Folder already exists: Project (6300a112f8f749e9a26007bb80dee152). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1380.029517] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Creating folder: Instances. Parent ref: group-v376931. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.029763] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc329fa1-4376-4c55-b9dc-9405658b355e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.039916] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Created folder: Instances in parent group-v376931. [ 1380.040235] env[63241]: DEBUG oslo.service.loopingcall [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1380.040456] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1380.040679] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-713da934-e0b2-4f8f-b872-e7a81f9ab5de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.064790] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1380.064790] env[63241]: value = "task-1819933" [ 1380.064790] env[63241]: _type = "Task" [ 1380.064790] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.073780] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819933, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.089201] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819930, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.195516] env[63241]: DEBUG nova.network.neutron [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Successfully updated port: 003d3fab-f7ce-4892-b925-c2280d3a9ae2 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1380.230372] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84895e94-f0c3-4f9b-9b1e-b12cb768c124 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.239302] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769653e6-05dd-4acf-a91d-18d78a14db5a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.275641] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76eb8ab0-4b64-44dc-9bb0-124ca6161c2b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.287546] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e2227d-ac08-436c-aae6-8d5867389d9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.291636] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.675305} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.291924] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0c72c98b-57f0-44e5-9159-490b27eac3a6/0c72c98b-57f0-44e5-9159-490b27eac3a6.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1380.292276] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1380.292920] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e25ab50-e650-459b-a7b1-cd034102e64d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.303546] env[63241]: DEBUG nova.compute.provider_tree [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1380.311222] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1380.311222] env[63241]: value = "task-1819934" [ 1380.311222] env[63241]: _type = "Task" [ 1380.311222] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.318587] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819934, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.471400] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 69c73342-258a-4b00-ba1b-ffdd5f247890] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1380.576121] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819933, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.594033] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819930, 'name': CreateSnapshot_Task, 'duration_secs': 0.920123} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.594033] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1380.594229] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1375395f-6503-44c7-975c-9bf0bf99e055 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.604994] env[63241]: DEBUG nova.network.neutron [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Updated VIF entry in instance network info cache for port 46d3ef2e-5410-4151-8ec8-30a6f2e5e221. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1380.606270] env[63241]: DEBUG nova.network.neutron [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Updating instance_info_cache with network_info: [{"id": "46d3ef2e-5410-4151-8ec8-30a6f2e5e221", "address": "fa:16:3e:47:8a:d3", "network": {"id": "a8367b18-022c-41b4-8c92-d1415c31263d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2039791152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fedeb3768ebc4b96bd5a85bfb0a03cf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46d3ef2e-54", "ovs_interfaceid": "46d3ef2e-5410-4151-8ec8-30a6f2e5e221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.698286] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-11b1888e-95ec-4166-9219-0c38f8817dd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.700200] env[63241]: DEBUG oslo_concurrency.lockutils 
[None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-11b1888e-95ec-4166-9219-0c38f8817dd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.700200] env[63241]: DEBUG nova.network.neutron [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1380.828752] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819934, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070689} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.829070] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1380.830407] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729d113f-857f-47a6-9bea-10c298b5b339 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.834467] env[63241]: ERROR nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [req-8d247437-2ab4-4fc4-9cf2-1654652d0bb4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8d247437-2ab4-4fc4-9cf2-1654652d0bb4"}]} [ 1380.864933] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 0c72c98b-57f0-44e5-9159-490b27eac3a6/0c72c98b-57f0-44e5-9159-490b27eac3a6.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1380.866983] env[63241]: DEBUG nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1380.870580] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb9ba707-9ae4-4a7d-b47d-0b4fffa9af8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.892351] env[63241]: DEBUG nova.compute.manager [req-5b5e281b-5f5d-4b17-9090-97f23f743fa5 req-c1b24e2f-2446-4753-99de-68ce59a8c8a8 service nova] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Received event network-vif-plugged-003d3fab-f7ce-4892-b925-c2280d3a9ae2 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1380.892351] env[63241]: DEBUG oslo_concurrency.lockutils [req-5b5e281b-5f5d-4b17-9090-97f23f743fa5 req-c1b24e2f-2446-4753-99de-68ce59a8c8a8 service nova] Acquiring lock "11b1888e-95ec-4166-9219-0c38f8817dd4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.892351] env[63241]: DEBUG oslo_concurrency.lockutils [req-5b5e281b-5f5d-4b17-9090-97f23f743fa5 req-c1b24e2f-2446-4753-99de-68ce59a8c8a8 service nova] Lock "11b1888e-95ec-4166-9219-0c38f8817dd4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.892351] env[63241]: DEBUG oslo_concurrency.lockutils [req-5b5e281b-5f5d-4b17-9090-97f23f743fa5 req-c1b24e2f-2446-4753-99de-68ce59a8c8a8 service nova] Lock "11b1888e-95ec-4166-9219-0c38f8817dd4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.892351] env[63241]: DEBUG nova.compute.manager [req-5b5e281b-5f5d-4b17-9090-97f23f743fa5 req-c1b24e2f-2446-4753-99de-68ce59a8c8a8 service nova] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] No waiting events found dispatching network-vif-plugged-003d3fab-f7ce-4892-b925-c2280d3a9ae2 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1380.892575] env[63241]: WARNING nova.compute.manager [req-5b5e281b-5f5d-4b17-9090-97f23f743fa5 req-c1b24e2f-2446-4753-99de-68ce59a8c8a8 
service nova] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Received unexpected event network-vif-plugged-003d3fab-f7ce-4892-b925-c2280d3a9ae2 for instance with vm_state building and task_state spawning. [ 1380.899467] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1380.899467] env[63241]: value = "task-1819935" [ 1380.899467] env[63241]: _type = "Task" [ 1380.899467] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.912162] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819935, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.913207] env[63241]: DEBUG nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1380.913449] env[63241]: DEBUG nova.compute.provider_tree [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1380.926020] env[63241]: DEBUG nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1380.948155] env[63241]: DEBUG nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 
1381.077062] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819933, 'name': CreateVM_Task, 'duration_secs': 0.605349} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.077390] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1381.078559] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-376937', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'name': 'volume-c32b5066-e324-4377-90d0-ef224dd92932', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'serial': 'c32b5066-e324-4377-90d0-ef224dd92932'}, 'boot_index': 0, 'attachment_id': 'ff799f61-fd73-4fc3-91cc-f0c755791acc', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=63241) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1381.078906] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Root volume attach. 
Driver type: vmdk {{(pid=63241) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1381.084360] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14b2f71-b516-4d66-bbbd-5963402f379f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.093733] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cda1c4c-4720-4894-a9e3-8a8ca34211bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.102720] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccf1c8b-a86a-4047-9038-d4461d1a080e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.114991] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1381.117643] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Releasing lock "refresh_cache-44508cc6-c576-4c30-8559-75118ceba02a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.117897] env[63241]: DEBUG nova.compute.manager [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Received event network-changed-279c7c67-cf23-442a-accf-544adeda8d12 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1381.118069] env[63241]: DEBUG nova.compute.manager [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Refreshing instance network info cache due to event network-changed-279c7c67-cf23-442a-accf-544adeda8d12. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1381.118426] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Acquiring lock "refresh_cache-a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.118470] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Acquired lock "refresh_cache-a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.118651] env[63241]: DEBUG nova.network.neutron [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Refreshing network info cache for port 279c7c67-cf23-442a-accf-544adeda8d12 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1381.119931] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-07094241-078e-411c-b83f-8db8db21443f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.126807] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-14857290-aba2-4343-b6a9-70c3d5e32f07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.135811] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for the task: (returnval){ [ 1381.135811] env[63241]: value = "task-1819937" [ 1381.135811] env[63241]: _type = "Task" [ 1381.135811] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.137278] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1381.137278] env[63241]: value = "task-1819936" [ 1381.137278] env[63241]: _type = "Task" [ 1381.137278] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.155302] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819936, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.155541] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819937, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.242241] env[63241]: DEBUG nova.network.neutron [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1381.415564] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819935, 'name': ReconfigVM_Task, 'duration_secs': 0.485631} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.415564] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 0c72c98b-57f0-44e5-9159-490b27eac3a6/0c72c98b-57f0-44e5-9159-490b27eac3a6.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1381.417103] env[63241]: DEBUG nova.network.neutron [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Updating instance_info_cache with network_info: [{"id": "003d3fab-f7ce-4892-b925-c2280d3a9ae2", "address": "fa:16:3e:66:c4:d9", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003d3fab-f7", "ovs_interfaceid": "003d3fab-f7ce-4892-b925-c2280d3a9ae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.417423] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bd87490-83b4-4ea2-8741-6a635122ae4f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.424279] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1381.424279] env[63241]: value = "task-1819938" 
[ 1381.424279] env[63241]: _type = "Task" [ 1381.424279] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.434411] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819938, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.442249] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c761867-eb9a-4b08-811a-d7a07b7afcc4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.452014] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb5ef53-1786-4c23-9714-936a16c1a3a2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.490586] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b29b0c2-03ef-482d-872e-bcf4e59ef612 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.498886] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5228d613-73be-453c-842d-4c4ae6c21023 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.194293] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-11b1888e-95ec-4166-9219-0c38f8817dd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.194618] env[63241]: DEBUG nova.compute.manager [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Instance network_info: |[{"id": "003d3fab-f7ce-4892-b925-c2280d3a9ae2", "address": "fa:16:3e:66:c4:d9", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003d3fab-f7", "ovs_interfaceid": "003d3fab-f7ce-4892-b925-c2280d3a9ae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1382.203119] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:c4:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dacd109c-2442-41b8-b612-7ed3efbdaa94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '003d3fab-f7ce-4892-b925-c2280d3a9ae2', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1382.210422] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Creating folder: Project (9c64d07a686b414f93ec4c599307498f). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1382.214490] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "c390d1ca-a199-4df6-847a-b543630a7bf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.214739] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "c390d1ca-a199-4df6-847a-b543630a7bf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.216509] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-968fd7e0-8e8f-4d89-bb00-1b80f8b772f6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.230271] env[63241]: DEBUG nova.compute.provider_tree [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1382.238014] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819936, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.238256] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819937, 'name': RelocateVM_Task, 'duration_secs': 0.029841} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.243723] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Volume attach. Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1382.243723] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-376937', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'name': 'volume-c32b5066-e324-4377-90d0-ef224dd92932', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'serial': 'c32b5066-e324-4377-90d0-ef224dd92932'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1382.244152] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819938, 'name': Rename_Task, 'duration_secs': 0.248324} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.245726] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edddb63-87e3-4c7d-973e-97e7f9a19df0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.247739] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1382.248674] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca08e87a-ad4b-4ab5-bd5a-47f81bce0bf4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.252703] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Created folder: Project (9c64d07a686b414f93ec4c599307498f) in parent group-v376927. 
[ 1382.252921] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Creating folder: Instances. Parent ref: group-v376984. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1382.264097] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5898a88-ee30-4549-a632-4be70db9f15b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.270304] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cdb119-69af-4540-bfba-95eb05cf528f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.273192] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1382.273192] env[63241]: value = "task-1819940" [ 1382.273192] env[63241]: _type = "Task" [ 1382.273192] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.295119] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] volume-c32b5066-e324-4377-90d0-ef224dd92932/volume-c32b5066-e324-4377-90d0-ef224dd92932.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1382.299889] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d8d4a6b-d1c4-4636-a5a0-9d91c59abfaf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.314242] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Created folder: Instances in parent group-v376984. [ 1382.314242] env[63241]: DEBUG oslo.service.loopingcall [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1382.314807] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819940, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.315288] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1382.315889] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4096333-37c9-4ae4-975a-a16e3b5ea41a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.331166] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for the task: (returnval){ [ 1382.331166] env[63241]: value = "task-1819942" [ 1382.331166] env[63241]: _type = "Task" [ 1382.331166] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.335842] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1382.335842] env[63241]: value = "task-1819943" [ 1382.335842] env[63241]: _type = "Task" [ 1382.335842] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.345591] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819942, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.351076] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819943, 'name': CreateVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.626772] env[63241]: DEBUG nova.network.neutron [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Updated VIF entry in instance network info cache for port 279c7c67-cf23-442a-accf-544adeda8d12. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1382.627246] env[63241]: DEBUG nova.network.neutron [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Updating instance_info_cache with network_info: [{"id": "279c7c67-cf23-442a-accf-544adeda8d12", "address": "fa:16:3e:b8:9d:da", "network": {"id": "3adeb608-f93c-4422-b870-1d1726c199e2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1887376543-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5925758ee5404bbba0f8c9678fcd1eef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap279c7c67-cf", "ovs_interfaceid": "279c7c67-cf23-442a-accf-544adeda8d12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.708964] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819936, 'name': CloneVM_Task, 'duration_secs': 1.539489} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.708964] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Created linked-clone VM from snapshot [ 1382.708964] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d163e0-a8dc-4585-87a2-3d0d49d0c6a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.719663] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Uploading image 2e5e8333-de95-4236-94c3-4f089c2b83a4 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1382.747332] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1382.747332] env[63241]: value = "vm-376983" [ 1382.747332] env[63241]: _type = "VirtualMachine" [ 1382.747332] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1382.747792] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-98ec036f-5288-4ae9-8985-76c1caf58870 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.755258] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lease: (returnval){ [ 1382.755258] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c019e4-17d4-d357-aa28-9f011bef435f" [ 1382.755258] env[63241]: _type = "HttpNfcLease" [ 1382.755258] env[63241]: } obtained for exporting VM: (result){ [ 1382.755258] env[63241]: value = "vm-376983" [ 1382.755258] env[63241]: _type = "VirtualMachine" [ 1382.755258] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1382.755573] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the lease: (returnval){ [ 1382.755573] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c019e4-17d4-d357-aa28-9f011bef435f" [ 1382.755573] env[63241]: _type = "HttpNfcLease" [ 1382.755573] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1382.763144] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1382.763144] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c019e4-17d4-d357-aa28-9f011bef435f" [ 1382.763144] env[63241]: _type = "HttpNfcLease" [ 1382.763144] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1382.764493] env[63241]: ERROR nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] [req-cccd46b4-e376-4d00-b127-45ad20755d27] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cccd46b4-e376-4d00-b127-45ad20755d27"}]} [ 1382.785093] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819940, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.787382] env[63241]: DEBUG nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1382.803059] env[63241]: DEBUG nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1382.803713] env[63241]: DEBUG nova.compute.provider_tree [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1382.816536] env[63241]: DEBUG nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1382.842701] env[63241]: DEBUG nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1382.850521] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819942, 'name': ReconfigVM_Task, 'duration_secs': 0.370693} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.850521] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Reconfigured VM instance instance-00000011 to attach disk [datastore1] volume-c32b5066-e324-4377-90d0-ef224dd92932/volume-c32b5066-e324-4377-90d0-ef224dd92932.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1382.859545] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2843b765-e5db-47d0-8900-c33ffdca0a3a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.875234] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819943, 'name': CreateVM_Task, 'duration_secs': 0.425234} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.878051] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1382.879776] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.879776] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.879776] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1382.880531] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38271756-52a0-4570-949d-28cd11eb2576 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.884701] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for the task: (returnval){ [ 1382.884701] env[63241]: value = "task-1819945" [ 1382.884701] env[63241]: _type = "Task" [ 1382.884701] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.886419] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1382.886419] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5283c592-2111-64c1-93d0-5ec54f5fc9fe" [ 1382.886419] env[63241]: _type = "Task" [ 1382.886419] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.906077] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5283c592-2111-64c1-93d0-5ec54f5fc9fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.906391] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819945, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.132511] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Releasing lock "refresh_cache-a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.133929] env[63241]: DEBUG nova.compute.manager [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Received event network-vif-plugged-f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.133929] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Acquiring lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.133929] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.134654] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.134654] env[63241]: DEBUG nova.compute.manager [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] 
No waiting events found dispatching network-vif-plugged-f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1383.134654] env[63241]: WARNING nova.compute.manager [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Received unexpected event network-vif-plugged-f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e for instance with vm_state building and task_state spawning. [ 1383.134654] env[63241]: DEBUG nova.compute.manager [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Received event network-changed-f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.134654] env[63241]: DEBUG nova.compute.manager [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Refreshing instance network info cache due to event network-changed-f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1383.134970] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Acquiring lock "refresh_cache-c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.134970] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Acquired lock "refresh_cache-c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.135160] env[63241]: DEBUG nova.network.neutron [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Refreshing network info cache for port f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.278750] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1383.278750] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c019e4-17d4-d357-aa28-9f011bef435f" [ 1383.278750] env[63241]: _type = "HttpNfcLease" [ 1383.278750] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1383.282392] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1383.282392] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c019e4-17d4-d357-aa28-9f011bef435f" [ 1383.282392] env[63241]: _type = "HttpNfcLease" [ 1383.282392] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1383.287661] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36f1646-9559-4443-b6a8-557eb7280267 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.307499] env[63241]: DEBUG oslo_vmware.api [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1819940, 'name': PowerOnVM_Task, 'duration_secs': 0.645796} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.307499] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52144b54-15a1-aee6-8025-edd8eecdd35e/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1383.309354] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52144b54-15a1-aee6-8025-edd8eecdd35e/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1383.310653] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1383.310768] env[63241]: INFO nova.compute.manager [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Took 11.58 seconds to spawn the instance on the hypervisor. [ 1383.310957] env[63241]: DEBUG nova.compute.manager [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1383.312537] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83e99e3-1dee-45b8-bc3f-38e6e1795b86 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.400224] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819945, 'name': ReconfigVM_Task, 'duration_secs': 0.142874} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.404742] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-376937', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'name': 'volume-c32b5066-e324-4377-90d0-ef224dd92932', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'serial': 'c32b5066-e324-4377-90d0-ef224dd92932'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1383.405890] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5283c592-2111-64c1-93d0-5ec54f5fc9fe, 'name': SearchDatastore_Task, 'duration_secs': 0.016468} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.405890] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23730182-7321-4940-9a3e-9e2375e332dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.409132] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.409132] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1383.412093] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.412336] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.412521] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 
tempest-DeleteServersTestJSON-1966896593-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1383.415333] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80ae8f27-0a0c-41ed-977b-ffc741b2c0ce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.427740] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for the task: (returnval){ [ 1383.427740] env[63241]: value = "task-1819946" [ 1383.427740] env[63241]: _type = "Task" [ 1383.427740] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.429207] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1383.429372] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1383.436480] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e9acb51-72ee-44f2-aad7-7df9e936db22 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.448760] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1383.448760] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523b13a8-0681-aa52-87cb-aec009cc65e0" [ 1383.448760] env[63241]: _type = "Task" [ 1383.448760] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.449084] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819946, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.454779] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0493ebe4-ba07-4e33-adb2-dac5655ec489 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.470385] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523b13a8-0681-aa52-87cb-aec009cc65e0, 'name': SearchDatastore_Task, 'duration_secs': 0.011345} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.473902] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a565e644-521e-4838-bc74-c4faaae096c7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.488214] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1383.488214] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5228b9e8-49d5-7700-8753-fb646f877b6d" [ 1383.488214] env[63241]: _type = "Task" [ 1383.488214] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.503272] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5228b9e8-49d5-7700-8753-fb646f877b6d, 'name': SearchDatastore_Task, 'duration_secs': 0.012526} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.504499] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.504757] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 11b1888e-95ec-4166-9219-0c38f8817dd4/11b1888e-95ec-4166-9219-0c38f8817dd4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1383.505515] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4222cb60-198e-40ae-b6b4-caf08465430c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.508218] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fb80757-4d15-474a-936d-79995214cee5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.518746] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1383.518746] env[63241]: value = "task-1819947" [ 1383.518746] env[63241]: _type = "Task" [ 1383.518746] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.520015] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c34e462-7a47-4f89-8510-fedeaa00bbb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.534712] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819947, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.560277] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5577c797-20de-42f4-9fe3-75cde0cc12c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.568397] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f20bd1-47d4-4720-bea8-bd750514785a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.583593] env[63241]: DEBUG nova.compute.provider_tree [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1383.612937] env[63241]: DEBUG nova.compute.manager [req-6f6d4ffe-0079-4294-bff9-e2d10bc06ac5 req-379f4ca1-73f8-4ea4-858c-591e1fdedf4f service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Received event network-changed-cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.612937] env[63241]: DEBUG nova.compute.manager [req-6f6d4ffe-0079-4294-bff9-e2d10bc06ac5 req-379f4ca1-73f8-4ea4-858c-591e1fdedf4f service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Refreshing instance network info cache due to event network-changed-cd1c9913-c7c6-4258-9006-ee7987594482. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1383.613069] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f6d4ffe-0079-4294-bff9-e2d10bc06ac5 req-379f4ca1-73f8-4ea4-858c-591e1fdedf4f service nova] Acquiring lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.613163] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f6d4ffe-0079-4294-bff9-e2d10bc06ac5 req-379f4ca1-73f8-4ea4-858c-591e1fdedf4f service nova] Acquired lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.613336] env[63241]: DEBUG nova.network.neutron [req-6f6d4ffe-0079-4294-bff9-e2d10bc06ac5 req-379f4ca1-73f8-4ea4-858c-591e1fdedf4f service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Refreshing network info cache for port cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.751588] env[63241]: DEBUG nova.compute.manager [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Received event network-changed-003d3fab-f7ce-4892-b925-c2280d3a9ae2 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.751801] env[63241]: DEBUG nova.compute.manager [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Refreshing instance network info cache due to event network-changed-003d3fab-f7ce-4892-b925-c2280d3a9ae2. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1383.752548] env[63241]: DEBUG oslo_concurrency.lockutils [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] Acquiring lock "refresh_cache-11b1888e-95ec-4166-9219-0c38f8817dd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.752548] env[63241]: DEBUG oslo_concurrency.lockutils [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] Acquired lock "refresh_cache-11b1888e-95ec-4166-9219-0c38f8817dd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.752548] env[63241]: DEBUG nova.network.neutron [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Refreshing network info cache for port 003d3fab-f7ce-4892-b925-c2280d3a9ae2 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.900369] env[63241]: INFO nova.compute.manager [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Took 30.03 seconds to build instance. [ 1383.947361] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819946, 'name': Rename_Task, 'duration_secs': 0.156965} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.948627] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1383.948627] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb470bcf-fb1b-485c-bfe5-23ca7e98f4bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.960528] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for the task: (returnval){ [ 1383.960528] env[63241]: value = "task-1819948" [ 1383.960528] env[63241]: _type = "Task" [ 1383.960528] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.970052] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.974040] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.031707] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819947, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.104549] env[63241]: DEBUG nova.network.neutron [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Updated VIF entry in instance network info cache for port f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1384.105787] env[63241]: DEBUG nova.network.neutron [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Updating instance_info_cache with network_info: [{"id": "f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e", "address": "fa:16:3e:3a:c0:84", "network": {"id": "49c2943c-4145-4b92-aa07-343c928ed805", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1351064395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6300a112f8f749e9a26007bb80dee152", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9f1a2e1-b5", "ovs_interfaceid": "f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.146837] env[63241]: DEBUG nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 38 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1384.150544] env[63241]: DEBUG nova.compute.provider_tree [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 38 to 39 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1384.150544] env[63241]: DEBUG nova.compute.provider_tree [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1384.401499] env[63241]: DEBUG oslo_concurrency.lockutils [None req-332fdd5a-55cd-4ffb-9831-d3edefb54a4e tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "0c72c98b-57f0-44e5-9159-490b27eac3a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.276s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.471082] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819948, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.536677] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819947, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.588377] env[63241]: DEBUG nova.network.neutron [req-6f6d4ffe-0079-4294-bff9-e2d10bc06ac5 req-379f4ca1-73f8-4ea4-858c-591e1fdedf4f service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Updated VIF entry in instance network info cache for port cd1c9913-c7c6-4258-9006-ee7987594482. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1384.588864] env[63241]: DEBUG nova.network.neutron [req-6f6d4ffe-0079-4294-bff9-e2d10bc06ac5 req-379f4ca1-73f8-4ea4-858c-591e1fdedf4f service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Updating instance_info_cache with network_info: [{"id": "cd1c9913-c7c6-4258-9006-ee7987594482", "address": "fa:16:3e:93:7c:04", "network": {"id": "d2d9d4dd-5702-4a7f-a301-7f4d520edfbf", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1366579599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b98d078fef845cf87f6d932885790e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd1c9913-c7", "ovs_interfaceid": "cd1c9913-c7c6-4258-9006-ee7987594482", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.609342] env[63241]: DEBUG oslo_concurrency.lockutils [req-6e48b57a-be16-4a97-89ae-3b2a805a47ec req-369a929b-b956-4e49-8da0-cbb4681fd64e service nova] Releasing lock "refresh_cache-c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.654722] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.471s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.657389] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.652s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.659837] env[63241]: INFO nova.compute.claims [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1384.694865] env[63241]: INFO nova.scheduler.client.report [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Deleted allocations for instance 0440c0a8-f065-4a82-b190-33279e7c0d93 [ 1384.905164] env[63241]: DEBUG nova.compute.manager [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1384.972437] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819948, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.022247] env[63241]: DEBUG nova.network.neutron [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Updated VIF entry in instance network info cache for port 003d3fab-f7ce-4892-b925-c2280d3a9ae2. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1385.024315] env[63241]: DEBUG nova.network.neutron [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Updating instance_info_cache with network_info: [{"id": "003d3fab-f7ce-4892-b925-c2280d3a9ae2", "address": "fa:16:3e:66:c4:d9", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap003d3fab-f7", "ovs_interfaceid": "003d3fab-f7ce-4892-b925-c2280d3a9ae2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.039037] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819947, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.278433} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.039800] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 11b1888e-95ec-4166-9219-0c38f8817dd4/11b1888e-95ec-4166-9219-0c38f8817dd4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1385.040079] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1385.040392] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9451f591-2cac-48dd-bc84-dc89712c9a71 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.049745] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1385.049745] env[63241]: value = "task-1819949" [ 1385.049745] env[63241]: _type = "Task" [ 1385.049745] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.058422] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819949, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.091601] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f6d4ffe-0079-4294-bff9-e2d10bc06ac5 req-379f4ca1-73f8-4ea4-858c-591e1fdedf4f service nova] Releasing lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.206699] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc1922fd-785d-4ea3-b3db-ee1c8d05563f tempest-ServerExternalEventsTest-1747474501 tempest-ServerExternalEventsTest-1747474501-project-member] Lock "0440c0a8-f065-4a82-b190-33279e7c0d93" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.650s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.430285] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.451494] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.451793] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1385.451915] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1385.471755] env[63241]: DEBUG oslo_vmware.api [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1819948, 'name': PowerOnVM_Task, 'duration_secs': 1.210642} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.472112] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1385.472294] env[63241]: INFO nova.compute.manager [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Took 7.01 seconds to spawn the instance on the hypervisor. 
[ 1385.472503] env[63241]: DEBUG nova.compute.manager [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1385.473433] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f3a536-4f0f-4bde-9008-821b13563c4d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.525198] env[63241]: DEBUG oslo_concurrency.lockutils [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] Releasing lock "refresh_cache-11b1888e-95ec-4166-9219-0c38f8817dd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.525470] env[63241]: DEBUG nova.compute.manager [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Received event network-changed-1e0eeec7-9caf-4069-8cad-d1d0d038ea2b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1385.525639] env[63241]: DEBUG nova.compute.manager [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Refreshing instance network info cache due to event network-changed-1e0eeec7-9caf-4069-8cad-d1d0d038ea2b. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1385.525871] env[63241]: DEBUG oslo_concurrency.lockutils [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] Acquiring lock "refresh_cache-3c51d4dc-5a2c-4483-9aa5-8bab532971d4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.526055] env[63241]: DEBUG oslo_concurrency.lockutils [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] Acquired lock "refresh_cache-3c51d4dc-5a2c-4483-9aa5-8bab532971d4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.526231] env[63241]: DEBUG nova.network.neutron [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Refreshing network info cache for port 1e0eeec7-9caf-4069-8cad-d1d0d038ea2b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1385.561493] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070302} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.561775] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1385.562666] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2efe1b-ebed-454b-ac39-95b654a1e9cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.588718] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 11b1888e-95ec-4166-9219-0c38f8817dd4/11b1888e-95ec-4166-9219-0c38f8817dd4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1385.589874] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b13aec5-6c74-4135-af31-3f535cfd0697 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.618277] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1385.618277] env[63241]: value = "task-1819950" [ 1385.618277] env[63241]: _type = "Task" [ 1385.618277] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.623130] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819950, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.970397] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Skipping network cache update for instance because it is Building. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1385.970397] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Skipping network cache update for instance because it is Building. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1385.970557] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Skipping network cache update for instance because it is Building. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1386.004578] env[63241]: INFO nova.compute.manager [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Took 29.03 seconds to build instance. [ 1386.014451] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-b4182e53-50db-4256-b376-b00100778935" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.017317] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-b4182e53-50db-4256-b376-b00100778935" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.017317] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: b4182e53-50db-4256-b376-b00100778935] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1386.017317] env[63241]: DEBUG nova.objects.instance [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lazy-loading 'info_cache' on Instance uuid b4182e53-50db-4256-b376-b00100778935 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1386.128215] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819950, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.221168] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2341c5-7d77-4238-be0f-3dee7e23ec5e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.229794] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a1b24c-a547-4706-b6c5-ceddfc3c0042 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.270439] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f951826-609b-4355-8346-dc97ecff72ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.279602] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62afa1c5-4ec0-4e5e-a87a-cb3026bb5378 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.295880] env[63241]: DEBUG nova.compute.provider_tree [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.510679] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1bcae36e-9ef6-437f-80ef-32deab432d14 tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.108s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.625145] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819950, 'name': ReconfigVM_Task, 'duration_secs': 0.559149} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.625447] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 11b1888e-95ec-4166-9219-0c38f8817dd4/11b1888e-95ec-4166-9219-0c38f8817dd4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1386.626353] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df71eb80-a361-45a3-9019-fd0d58dfbd1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.633836] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1386.633836] env[63241]: value = "task-1819951" [ 1386.633836] env[63241]: _type = "Task" [ 1386.633836] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.648024] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819951, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.686125] env[63241]: DEBUG nova.network.neutron [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Updated VIF entry in instance network info cache for port 1e0eeec7-9caf-4069-8cad-d1d0d038ea2b. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1386.686125] env[63241]: DEBUG nova.network.neutron [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Updating instance_info_cache with network_info: [{"id": "1e0eeec7-9caf-4069-8cad-d1d0d038ea2b", "address": "fa:16:3e:e4:1a:ee", "network": {"id": "1cb2bfac-6339-4368-9554-14333bff6b76", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-149124339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bfb1bfc0a12b45cf84d18d038c94fe3a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e0eeec7-9c", "ovs_interfaceid": "1e0eeec7-9caf-4069-8cad-d1d0d038ea2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.803615] env[63241]: DEBUG nova.scheduler.client.report [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1387.016270] env[63241]: DEBUG nova.compute.manager [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1387.061893] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: b4182e53-50db-4256-b376-b00100778935] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1387.149212] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819951, 'name': Rename_Task, 'duration_secs': 0.314414} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.149212] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1387.149212] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61adbe0e-b215-4ec9-abdc-84c910258d9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.156870] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1387.156870] env[63241]: value = "task-1819952" [ 1387.156870] env[63241]: _type = "Task" [ 1387.156870] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.164821] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819952, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.189048] env[63241]: DEBUG oslo_concurrency.lockutils [req-e1d0f0e6-753f-4435-aea5-c5a5210b470e req-53648880-5d7f-4895-a8f6-ea19697362fd service nova] Releasing lock "refresh_cache-3c51d4dc-5a2c-4483-9aa5-8bab532971d4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.314440] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.318358] env[63241]: DEBUG nova.compute.manager [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1387.320695] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.472s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.321175] env[63241]: DEBUG nova.objects.instance [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] [instance: b4182e53-50db-4256-b376-b00100778935] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1387.548800] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.670660] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819952, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.738334] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: b4182e53-50db-4256-b376-b00100778935] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.827918] env[63241]: DEBUG nova.compute.utils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1387.837632] env[63241]: DEBUG nova.compute.manager [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1387.837828] env[63241]: DEBUG nova.network.neutron [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1388.001789] env[63241]: DEBUG nova.policy [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78657a2bc34d4bb9922678ed287530f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18094134f49b4e84b83e97631bc22903', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1388.173736] env[63241]: DEBUG oslo_vmware.api [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819952, 'name': PowerOnVM_Task, 'duration_secs': 0.626374} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.174612] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1388.174869] env[63241]: INFO nova.compute.manager [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Took 9.09 seconds to spawn the instance on the hypervisor. 
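(The paired "Acquiring lock ... by ..." / "Lock ... acquired ... waited N.NNNs" / "Lock ... "released" ... held N.NNNs" DEBUG records throughout this stretch are emitted by oslo.concurrency's lockutils wrapper around a named lock. A minimal sketch of that pattern follows; the body of instance_claim() is a hypothetical stand-in, not Nova's actual ResourceTracker.instance_claim.)

# Minimal sketch (not Nova's code) of the oslo.concurrency locking pattern that
# produces DEBUG records like the "Acquiring lock ..." / "acquired ... waited" /
# "released ... held" lines above; the claim body is a hypothetical stand-in.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # While the lock is held, no other thread in this process can update the
    # tracked resource usage; lockutils logs how long callers waited and held it.
    return {'instance': instance_uuid, 'vcpus': vcpus, 'memory_mb': memory_mb}

if __name__ == '__main__':
    instance_claim('94a604da-ad3d-415a-aa92-d648e3da803d', 1, 192)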
[ 1388.175084] env[63241]: DEBUG nova.compute.manager [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1388.176650] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6258bd36-a7be-4b0a-83d4-8c76e7399bfa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.241957] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-b4182e53-50db-4256-b376-b00100778935" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.242264] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: b4182e53-50db-4256-b376-b00100778935] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1388.242880] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.243173] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.243533] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1388.338531] env[63241]: DEBUG nova.compute.manager [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1388.344370] env[63241]: DEBUG oslo_concurrency.lockutils [None req-499df72f-a14a-408e-bc32-3923f0466e6f tempest-ServersAdmin275Test-612860010 tempest-ServersAdmin275Test-612860010-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.345900] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.463s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.348026] env[63241]: INFO nova.compute.claims [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1388.452138] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.700918] env[63241]: INFO nova.compute.manager [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Took 31.12 seconds to build instance. [ 1388.749663] env[63241]: DEBUG nova.network.neutron [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Successfully created port: c6bc0033-163b-4b6a-8577-877f59a975dc {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1389.204927] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d323381-d0b3-4db8-99ea-d94ad79debcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "11b1888e-95ec-4166-9219-0c38f8817dd4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.554s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.352953] env[63241]: DEBUG nova.compute.manager [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1389.402806] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1389.403085] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1389.403607] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1389.403914] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1389.404126] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1389.404330] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1389.404822] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1389.405039] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1389.405307] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1389.405890] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1389.405890] env[63241]: DEBUG nova.virt.hardware [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1389.407069] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ad7a1d-ddd9-4b74-be5b-24d0fbce382f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.418773] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8634bf-436c-47e8-93d6-780a84443a4b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.709780] env[63241]: DEBUG nova.compute.manager [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1389.845228] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fedec3-ca2b-4b8e-b078-59a6841f80b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.854525] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff26287-93b2-49df-9356-02cd993fd513 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.890863] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8465e913-8eb9-4409-963f-5257148f5c49 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.899517] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb19379b-c23f-48ca-b745-d3fc258560f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.918387] env[63241]: DEBUG nova.compute.provider_tree [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1390.051269] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "2b1805b3-2e03-410f-8222-64b8542d4a43" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.051648] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.241896] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.416707] env[63241]: DEBUG nova.compute.manager [req-f18c798e-4f80-48bd-aa66-64f08ac41ec5 req-3df72c21-4690-47aa-bbd9-e661ef0017b1 service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Received event network-changed-f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1390.416707] env[63241]: DEBUG nova.compute.manager [req-f18c798e-4f80-48bd-aa66-64f08ac41ec5 req-3df72c21-4690-47aa-bbd9-e661ef0017b1 service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Refreshing instance network info cache due to event network-changed-f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1390.417284] env[63241]: DEBUG oslo_concurrency.lockutils [req-f18c798e-4f80-48bd-aa66-64f08ac41ec5 req-3df72c21-4690-47aa-bbd9-e661ef0017b1 service nova] Acquiring lock "refresh_cache-c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.417482] env[63241]: DEBUG oslo_concurrency.lockutils [req-f18c798e-4f80-48bd-aa66-64f08ac41ec5 req-3df72c21-4690-47aa-bbd9-e661ef0017b1 service nova] Acquired lock "refresh_cache-c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.421197] env[63241]: DEBUG nova.network.neutron [req-f18c798e-4f80-48bd-aa66-64f08ac41ec5 req-3df72c21-4690-47aa-bbd9-e661ef0017b1 service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Refreshing network info cache for port f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1390.423575] env[63241]: DEBUG nova.scheduler.client.report [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1390.451357] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.494152] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "78894fda-8309-430a-ab38-ce1a415d83d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.494832] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "78894fda-8309-430a-ab38-ce1a415d83d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.931221] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.931732] env[63241]: DEBUG nova.compute.manager [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1390.934535] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.030s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.938624] env[63241]: DEBUG nova.objects.instance [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lazy-loading 'resources' on Instance uuid b4182e53-50db-4256-b376-b00100778935 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1391.421659] env[63241]: DEBUG nova.network.neutron [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Successfully updated port: c6bc0033-163b-4b6a-8577-877f59a975dc {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1391.448713] env[63241]: DEBUG nova.compute.utils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1391.450792] env[63241]: DEBUG nova.compute.manager [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1391.450973] env[63241]: DEBUG nova.network.neutron [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1391.453646] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.453818] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.513578] env[63241]: DEBUG nova.network.neutron [req-f18c798e-4f80-48bd-aa66-64f08ac41ec5 req-3df72c21-4690-47aa-bbd9-e661ef0017b1 service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Updated VIF entry in instance network info cache for port f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1391.514013] env[63241]: DEBUG nova.network.neutron [req-f18c798e-4f80-48bd-aa66-64f08ac41ec5 req-3df72c21-4690-47aa-bbd9-e661ef0017b1 service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Updating instance_info_cache with network_info: [{"id": "f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e", "address": "fa:16:3e:3a:c0:84", "network": {"id": "49c2943c-4145-4b92-aa07-343c928ed805", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1351064395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6300a112f8f749e9a26007bb80dee152", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9f1a2e1-b5", "ovs_interfaceid": "f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.558276] env[63241]: DEBUG nova.policy [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3932385355eb40efbbb7ad0df83e2d31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7bab059b7aff4961a226ef988e125438', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1391.930104] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "refresh_cache-94a604da-ad3d-415a-aa92-d648e3da803d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.930277] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "refresh_cache-94a604da-ad3d-415a-aa92-d648e3da803d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.930437] env[63241]: DEBUG nova.network.neutron [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1391.957131] env[63241]: 
DEBUG nova.compute.manager [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1392.018467] env[63241]: DEBUG oslo_concurrency.lockutils [req-f18c798e-4f80-48bd-aa66-64f08ac41ec5 req-3df72c21-4690-47aa-bbd9-e661ef0017b1 service nova] Releasing lock "refresh_cache-c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.044118] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62c52ab-7ffd-457e-ba71-193a4938e030 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.061458] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7057910a-1cd1-4a21-8a80-dbecfdf62a3c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.101609] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c38fbda-f206-4fca-84f9-984fce5ec7c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.111142] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80069aaf-6c01-4fdf-be86-9096c3cbe2e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.126103] env[63241]: DEBUG nova.compute.provider_tree [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1392.342029] env[63241]: DEBUG nova.network.neutron [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Successfully created port: 03370c0c-303d-4511-8cd8-44be5bad305a {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1392.519350] env[63241]: DEBUG nova.network.neutron [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1392.629807] env[63241]: DEBUG nova.scheduler.client.report [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1392.967726] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "11b1888e-95ec-4166-9219-0c38f8817dd4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.968377] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "11b1888e-95ec-4166-9219-0c38f8817dd4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.968377] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "11b1888e-95ec-4166-9219-0c38f8817dd4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.969532] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "11b1888e-95ec-4166-9219-0c38f8817dd4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.969873] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "11b1888e-95ec-4166-9219-0c38f8817dd4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.973128] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.974665] env[63241]: INFO nova.compute.manager [None req-8a74c162-a167-4625-bea7-602e06b22f5f 
tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Terminating instance [ 1392.979118] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.979118] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances with incomplete migration {{(pid=63241) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1392.981663] env[63241]: DEBUG nova.compute.manager [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1392.981756] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1392.982866] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc76392e-f5e6-4744-9570-ce747ee72ddf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.999283] env[63241]: DEBUG nova.compute.manager [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1393.005899] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1393.005899] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7578fe9-141f-43b5-8135-dcaff18c2c9f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.015203] env[63241]: DEBUG oslo_vmware.api [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1393.015203] env[63241]: value = "task-1819953" [ 1393.015203] env[63241]: _type = "Task" [ 1393.015203] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.028835] env[63241]: DEBUG oslo_vmware.api [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819953, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.044475] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1393.044749] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1393.044918] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1393.045215] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1393.047400] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1393.047400] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1393.047400] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1393.047400] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1393.047400] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1393.047623] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1393.047623] env[63241]: DEBUG nova.virt.hardware [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1393.047623] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "343a7e90-5e55-4125-8475-44050f267987" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.047623] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "343a7e90-5e55-4125-8475-44050f267987" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.048421] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0846c340-a213-4520-9dae-bb8fe29e0790 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.052455] env[63241]: DEBUG nova.network.neutron [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Updating instance_info_cache with network_info: [{"id": "c6bc0033-163b-4b6a-8577-877f59a975dc", "address": "fa:16:3e:61:79:2a", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6bc0033-16", "ovs_interfaceid": "c6bc0033-163b-4b6a-8577-877f59a975dc", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.062427] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7a6ca8-0cf4-4cf2-991e-4228627a4d28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.141241] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.207s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.146055] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.967s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.149954] env[63241]: INFO nova.compute.claims [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1393.186783] env[63241]: INFO nova.scheduler.client.report [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Deleted allocations for instance b4182e53-50db-4256-b376-b00100778935 [ 1393.348326] env[63241]: DEBUG nova.compute.manager [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Received event network-vif-plugged-c6bc0033-163b-4b6a-8577-877f59a975dc {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1393.348326] env[63241]: DEBUG oslo_concurrency.lockutils [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] Acquiring lock "94a604da-ad3d-415a-aa92-d648e3da803d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.348553] env[63241]: DEBUG oslo_concurrency.lockutils [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] Lock "94a604da-ad3d-415a-aa92-d648e3da803d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.348901] env[63241]: DEBUG oslo_concurrency.lockutils [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] Lock "94a604da-ad3d-415a-aa92-d648e3da803d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1393.349165] env[63241]: DEBUG nova.compute.manager [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] No waiting events found dispatching network-vif-plugged-c6bc0033-163b-4b6a-8577-877f59a975dc {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1393.349415] env[63241]: WARNING nova.compute.manager [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Received unexpected event network-vif-plugged-c6bc0033-163b-4b6a-8577-877f59a975dc for instance with vm_state building and task_state spawning. [ 1393.349715] env[63241]: DEBUG nova.compute.manager [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Received event network-changed-c6bc0033-163b-4b6a-8577-877f59a975dc {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1393.349983] env[63241]: DEBUG nova.compute.manager [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Refreshing instance network info cache due to event network-changed-c6bc0033-163b-4b6a-8577-877f59a975dc. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1393.350474] env[63241]: DEBUG oslo_concurrency.lockutils [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] Acquiring lock "refresh_cache-94a604da-ad3d-415a-aa92-d648e3da803d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.531338] env[63241]: DEBUG oslo_vmware.api [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819953, 'name': PowerOffVM_Task, 'duration_secs': 0.239333} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.532259] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.532259] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1393.532804] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-045a6106-9632-4768-950f-561fc355abe5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.555319] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "refresh_cache-94a604da-ad3d-415a-aa92-d648e3da803d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.556303] env[63241]: DEBUG nova.compute.manager [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Instance network_info: |[{"id": "c6bc0033-163b-4b6a-8577-877f59a975dc", "address": "fa:16:3e:61:79:2a", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6bc0033-16", "ovs_interfaceid": "c6bc0033-163b-4b6a-8577-877f59a975dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1393.556303] env[63241]: DEBUG oslo_concurrency.lockutils [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] Acquired lock "refresh_cache-94a604da-ad3d-415a-aa92-d648e3da803d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.556463] env[63241]: DEBUG nova.network.neutron [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Refreshing network info cache for port 
c6bc0033-163b-4b6a-8577-877f59a975dc {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1393.558186] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:79:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6bc0033-163b-4b6a-8577-877f59a975dc', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1393.575036] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Creating folder: Project (18094134f49b4e84b83e97631bc22903). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.575036] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a2f06bf-0f37-4f31-a92e-4f91feb39fcd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.586771] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Created folder: Project (18094134f49b4e84b83e97631bc22903) in parent group-v376927. [ 1393.587081] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Creating folder: Instances. Parent ref: group-v376987. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.587385] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3cc942f-51ef-490b-b5bb-cf18ee8679f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.601112] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Created folder: Instances in parent group-v376987. [ 1393.601112] env[63241]: DEBUG oslo.service.loopingcall [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.601112] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1393.601112] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5caec5ef-b879-4437-8926-1b873a6cf57e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.625778] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1393.626053] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1393.626303] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleting the datastore file [datastore1] 11b1888e-95ec-4166-9219-0c38f8817dd4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.627165] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43be78e1-5049-4618-8398-82f6f6ea031b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.631617] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1393.631617] env[63241]: value = "task-1819957" [ 1393.631617] env[63241]: _type = "Task" [ 1393.631617] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.636902] env[63241]: DEBUG oslo_vmware.api [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1393.636902] env[63241]: value = "task-1819958" [ 1393.636902] env[63241]: _type = "Task" [ 1393.636902] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.644586] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819957, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.652188] env[63241]: DEBUG oslo_vmware.api [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819958, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.703275] env[63241]: DEBUG oslo_concurrency.lockutils [None req-879cca69-3e18-44bc-81e9-7b232781b1f1 tempest-ServersAdmin275Test-2053891055 tempest-ServersAdmin275Test-2053891055-project-member] Lock "b4182e53-50db-4256-b376-b00100778935" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.703s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.973862] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52144b54-15a1-aee6-8025-edd8eecdd35e/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1393.974543] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496fce12-a41a-4d98-a019-b5ace5ab5f97 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.981855] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52144b54-15a1-aee6-8025-edd8eecdd35e/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1393.981970] env[63241]: ERROR oslo_vmware.rw_handles [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52144b54-15a1-aee6-8025-edd8eecdd35e/disk-0.vmdk due to incomplete transfer. [ 1393.984933] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-766cf823-6085-44ac-9e3e-5cbf234b80e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.990283] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52144b54-15a1-aee6-8025-edd8eecdd35e/disk-0.vmdk. 
{{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1393.990458] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Uploaded image 2e5e8333-de95-4236-94c3-4f089c2b83a4 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1393.993813] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1393.994224] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-068af0dd-0c1a-4eea-9523-b6228fccf013 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.000664] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1394.000664] env[63241]: value = "task-1819959" [ 1394.000664] env[63241]: _type = "Task" [ 1394.000664] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.012256] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819959, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.142132] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819957, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.154279] env[63241]: DEBUG oslo_vmware.api [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1819958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189493} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.154279] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1394.154387] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1394.154494] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1394.154676] env[63241]: INFO nova.compute.manager [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1394.154919] env[63241]: DEBUG oslo.service.loopingcall [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1394.155215] env[63241]: DEBUG nova.compute.manager [-] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1394.155324] env[63241]: DEBUG nova.network.neutron [-] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1394.413536] env[63241]: DEBUG nova.network.neutron [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Updated VIF entry in instance network info cache for port c6bc0033-163b-4b6a-8577-877f59a975dc. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1394.413909] env[63241]: DEBUG nova.network.neutron [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Updating instance_info_cache with network_info: [{"id": "c6bc0033-163b-4b6a-8577-877f59a975dc", "address": "fa:16:3e:61:79:2a", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6bc0033-16", "ovs_interfaceid": "c6bc0033-163b-4b6a-8577-877f59a975dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.442150] env[63241]: DEBUG nova.network.neutron [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Successfully updated port: 03370c0c-303d-4511-8cd8-44be5bad305a {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1394.510530] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819959, 'name': Destroy_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.620182] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6f37cc-716b-4009-b9d2-39b672d6efb9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.627951] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a390913-52c5-4ae4-942d-298635538782 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.669628] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be57d1fe-27e2-412a-8554-c9772d9aaf78 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.676351] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819957, 'name': CreateVM_Task, 'duration_secs': 0.516412} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.676351] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1394.677502] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.677670] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.678037] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1394.681415] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1014d53d-27e6-4eb7-a01f-d5c955d5f7b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.683428] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c44b5f-57ac-424d-9e03-75ce0d03a085 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.697073] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1394.697073] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526f1435-317a-194e-efbf-cc43a8229a50" [ 1394.697073] env[63241]: _type = "Task" [ 1394.697073] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.706646] env[63241]: DEBUG nova.compute.provider_tree [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1394.720961] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526f1435-317a-194e-efbf-cc43a8229a50, 'name': SearchDatastore_Task, 'duration_secs': 0.016362} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.722525] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.722525] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1394.722525] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.722525] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.722667] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1394.723128] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0406540-27c7-42b4-921f-b935bcdcc02d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.733649] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1394.733649] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1394.733818] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b21501b8-b508-4767-af4b-5abfd84bb30a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.739710] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1394.739710] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5211e0fd-6bff-3c75-f341-e3b93d4ab016" [ 1394.739710] env[63241]: _type = "Task" [ 1394.739710] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.747906] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5211e0fd-6bff-3c75-f341-e3b93d4ab016, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.919016] env[63241]: DEBUG oslo_concurrency.lockutils [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] Releasing lock "refresh_cache-94a604da-ad3d-415a-aa92-d648e3da803d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.919016] env[63241]: DEBUG nova.compute.manager [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Received event network-changed-cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1394.919016] env[63241]: DEBUG nova.compute.manager [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Refreshing instance network info cache due to event network-changed-cd1c9913-c7c6-4258-9006-ee7987594482. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1394.919016] env[63241]: DEBUG oslo_concurrency.lockutils [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] Acquiring lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.919016] env[63241]: DEBUG oslo_concurrency.lockutils [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] Acquired lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.919219] env[63241]: DEBUG nova.network.neutron [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Refreshing network info cache for port cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1394.944279] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquiring lock "refresh_cache-5fce9350-6d45-4bfb-a74b-f5b384ecb16c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.945177] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquired lock "refresh_cache-5fce9350-6d45-4bfb-a74b-f5b384ecb16c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.945379] env[63241]: DEBUG nova.network.neutron [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1395.017239] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819959, 'name': Destroy_Task, 'duration_secs': 0.550683} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.017919] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Destroyed the VM [ 1395.018211] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1395.018344] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ce01504c-c713-4b41-8fed-f66ccdbed458 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.026990] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1395.026990] env[63241]: value = "task-1819960" [ 1395.026990] env[63241]: _type = "Task" [ 1395.026990] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.035903] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819960, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.181418] env[63241]: DEBUG nova.network.neutron [-] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.213965] env[63241]: DEBUG nova.scheduler.client.report [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1395.251992] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5211e0fd-6bff-3c75-f341-e3b93d4ab016, 'name': SearchDatastore_Task, 'duration_secs': 0.008816} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.253050] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9835a7f7-d1b9-41ac-84b2-53daebd5bad1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.258873] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1395.258873] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529e54dd-3012-4bfc-ff6e-7a368d5280e7" [ 1395.258873] env[63241]: _type = "Task" [ 1395.258873] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.269648] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529e54dd-3012-4bfc-ff6e-7a368d5280e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.505412] env[63241]: DEBUG nova.network.neutron [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.539778] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.539778] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.547642] env[63241]: DEBUG oslo_vmware.api [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819960, 'name': RemoveSnapshot_Task, 'duration_secs': 0.418849} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.547904] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1395.548503] env[63241]: INFO nova.compute.manager [None req-6159771b-faf6-4d80-abcf-b028f2724baa tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Took 16.52 seconds to snapshot the instance on the hypervisor. [ 1395.599872] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "81854e13-e0c1-43a9-8529-678d56d57bbf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.599872] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "81854e13-e0c1-43a9-8529-678d56d57bbf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.643971] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "bef91c1c-a418-4464-ae7b-883ffb7e9695" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.644420] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "bef91c1c-a418-4464-ae7b-883ffb7e9695" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.686796] env[63241]: INFO nova.compute.manager [-] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Took 1.53 seconds to deallocate network for instance. 
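The DeleteDatastoreFile_Task, CreateVM_Task, Destroy_Task and RemoveSnapshot_Task entries above all follow one shape: an "Invoking <something>_Task" call returns a task reference, and wait_for_task then polls it, emitting the "_poll_task ... progress is N%" DEBUG lines until the task reports "completed successfully" (or the instance-delete path raises on error). The Python below is a minimal sketch of that polling loop, assembled only from what these log lines show; it is not the oslo.vmware implementation, and get_task_info, TaskFailed and POLL_INTERVAL are hypothetical names introduced for the example.

    # Illustrative sketch only -- NOT oslo.vmware code. It mirrors the
    # "Waiting for the task ... / Task: {...} progress is N%" pattern in the
    # surrounding log. `get_task_info` is a hypothetical stand-in for the
    # per-poll property read against vCenter.
    import time

    POLL_INTERVAL = 0.5  # seconds between polls (hypothetical value)


    class TaskFailed(Exception):
        """Raised when the vCenter task ends in an error state."""


    def wait_for_task(get_task_info, task_ref):
        """Poll a vCenter task reference until it succeeds or fails.

        `get_task_info(task_ref)` is assumed to return an object with
        `state` ('running', 'success' or 'error'), `progress` (int)
        and `error` attributes.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == "running":
                # Matches the "_poll_task ... progress is N%" DEBUG lines.
                print(f"Task: {task_ref} progress is {info.progress}%.")
                time.sleep(POLL_INTERVAL)
                continue
            if info.state == "success":
                print(f"Task: {task_ref} completed successfully.")
                return info
            # Any other terminal state is treated as a failure.
            raise TaskFailed(f"Task {task_ref} failed: {info.error}")

The fixed sleep here simply stands in for the interval-driven polling that produces the repeated progress lines in the log; the duration_secs values reported on completion (e.g. 0.189493 for the DeleteDatastoreFile_Task above) are the elapsed time measured over exactly this wait.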
[ 1395.719906] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.719906] env[63241]: DEBUG nova.compute.manager [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1395.724872] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.097s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.724872] env[63241]: DEBUG nova.objects.instance [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lazy-loading 'resources' on Instance uuid 97890eda-0c1d-4423-acd2-60d3097c6f8a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1395.773283] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529e54dd-3012-4bfc-ff6e-7a368d5280e7, 'name': SearchDatastore_Task, 'duration_secs': 0.010169} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.774701] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.774701] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 94a604da-ad3d-415a-aa92-d648e3da803d/94a604da-ad3d-415a-aa92-d648e3da803d.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1395.774701] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c20ca2a0-e323-43a0-a3f2-1b66138df5c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.782983] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1395.782983] env[63241]: value = "task-1819961" [ 1395.782983] env[63241]: _type = "Task" [ 1395.782983] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.796747] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819961, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.956402] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.015221] env[63241]: DEBUG nova.compute.manager [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Received event network-vif-plugged-03370c0c-303d-4511-8cd8-44be5bad305a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.015522] env[63241]: DEBUG oslo_concurrency.lockutils [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] Acquiring lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.015744] env[63241]: DEBUG oslo_concurrency.lockutils [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] Lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.015948] env[63241]: DEBUG oslo_concurrency.lockutils [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] Lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.016179] env[63241]: DEBUG nova.compute.manager [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] No waiting events found dispatching network-vif-plugged-03370c0c-303d-4511-8cd8-44be5bad305a {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1396.016572] env[63241]: WARNING nova.compute.manager [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Received unexpected event network-vif-plugged-03370c0c-303d-4511-8cd8-44be5bad305a for instance with vm_state building and task_state spawning. [ 1396.016756] env[63241]: DEBUG nova.compute.manager [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Received event network-changed-03370c0c-303d-4511-8cd8-44be5bad305a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.016850] env[63241]: DEBUG nova.compute.manager [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Refreshing instance network info cache due to event network-changed-03370c0c-303d-4511-8cd8-44be5bad305a. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1396.017029] env[63241]: DEBUG oslo_concurrency.lockutils [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] Acquiring lock "refresh_cache-5fce9350-6d45-4bfb-a74b-f5b384ecb16c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.066218] env[63241]: DEBUG nova.network.neutron [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Updated VIF entry in instance network info cache for port cd1c9913-c7c6-4258-9006-ee7987594482. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1396.066562] env[63241]: DEBUG nova.network.neutron [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Updating instance_info_cache with network_info: [{"id": "cd1c9913-c7c6-4258-9006-ee7987594482", "address": "fa:16:3e:93:7c:04", "network": {"id": "d2d9d4dd-5702-4a7f-a301-7f4d520edfbf", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1366579599-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b98d078fef845cf87f6d932885790e1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c791d09c-1086-4ee1-bcde-6ca7d259cabd", "external-id": "nsx-vlan-transportzone-990", "segmentation_id": 990, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd1c9913-c7", "ovs_interfaceid": "cd1c9913-c7c6-4258-9006-ee7987594482", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.161217] env[63241]: DEBUG nova.network.neutron [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Updating instance_info_cache with network_info: [{"id": "03370c0c-303d-4511-8cd8-44be5bad305a", "address": "fa:16:3e:4b:9a:9e", "network": {"id": "9442adce-80bf-4a10-8a22-a3b54d25cd68", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1232087337-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bab059b7aff4961a226ef988e125438", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap03370c0c-30", "ovs_interfaceid": "03370c0c-303d-4511-8cd8-44be5bad305a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.196246] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.231590] env[63241]: DEBUG nova.compute.utils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1396.233967] env[63241]: DEBUG nova.compute.manager [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1396.234241] env[63241]: DEBUG nova.network.neutron [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1396.310982] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819961, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482201} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.315378] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 94a604da-ad3d-415a-aa92-d648e3da803d/94a604da-ad3d-415a-aa92-d648e3da803d.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1396.315587] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1396.316300] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6d8eae1-f992-45c2-a3a7-448c1e11e9ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.328565] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1396.328565] env[63241]: value = "task-1819962" [ 1396.328565] env[63241]: _type = "Task" [ 1396.328565] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.342045] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819962, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.405714] env[63241]: DEBUG nova.policy [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62f227a738384c9b888cdbc63d009acb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43ef918430594de39503f9fbd65095b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1396.464527] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.571104] env[63241]: DEBUG oslo_concurrency.lockutils [req-012368b9-1b93-419f-9d12-4593e9b6ff78 req-afc8e70e-21b7-4fab-b87f-54785df9fa79 service nova] Releasing lock "refresh_cache-eb506425-4ecc-44b7-afa4-0901fc60b04f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.663650] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Releasing lock "refresh_cache-5fce9350-6d45-4bfb-a74b-f5b384ecb16c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.663959] env[63241]: DEBUG nova.compute.manager [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Instance network_info: |[{"id": "03370c0c-303d-4511-8cd8-44be5bad305a", "address": "fa:16:3e:4b:9a:9e", "network": {"id": "9442adce-80bf-4a10-8a22-a3b54d25cd68", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1232087337-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bab059b7aff4961a226ef988e125438", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03370c0c-30", "ovs_interfaceid": "03370c0c-303d-4511-8cd8-44be5bad305a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1396.664588] env[63241]: DEBUG oslo_concurrency.lockutils [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 
req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] Acquired lock "refresh_cache-5fce9350-6d45-4bfb-a74b-f5b384ecb16c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.664768] env[63241]: DEBUG nova.network.neutron [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Refreshing network info cache for port 03370c0c-303d-4511-8cd8-44be5bad305a {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.665929] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:9a:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cfbd1264-be3d-4ca9-953a-df79de7b010b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03370c0c-303d-4511-8cd8-44be5bad305a', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1396.677032] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Creating folder: Project (7bab059b7aff4961a226ef988e125438). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.677032] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4202e7f9-92a2-4c80-a4af-5b0a0a080d1c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.695437] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Created folder: Project (7bab059b7aff4961a226ef988e125438) in parent group-v376927. [ 1396.695437] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Creating folder: Instances. Parent ref: group-v376990. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.695437] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8b2ea7c-0343-4f7b-9149-fef4346adbd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.708024] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Created folder: Instances in parent group-v376990. [ 1396.708420] env[63241]: DEBUG oslo.service.loopingcall [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.708657] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1396.708842] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b65e7ac0-ae39-442a-9230-e243dd154610 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.736862] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1396.736862] env[63241]: value = "task-1819965" [ 1396.736862] env[63241]: _type = "Task" [ 1396.736862] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.740744] env[63241]: DEBUG nova.compute.manager [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1396.751454] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819965, 'name': CreateVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.793822] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "99eccbef-0e76-4532-af2f-5d74e563e1d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.794108] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.794343] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "99eccbef-0e76-4532-af2f-5d74e563e1d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.794530] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.794696] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.798316] env[63241]: INFO nova.compute.manager [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Terminating instance [ 1396.806356] env[63241]: DEBUG nova.compute.manager [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1396.806356] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1396.806565] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6911aac-c6ab-41d0-b778-e90599bfcde7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.815820] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1396.815905] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ff9cde3-267a-4f0b-9fb1-45e1a196679a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.824952] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69faa05-f2d8-4edf-9210-a26e3458af59 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.837844] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d895e6d4-e84f-4679-a76b-81b667ce174f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.844882] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819962, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.229393} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.874025] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1396.874855] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e9c371-0057-4386-8426-529dcdeb53d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.877947] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6edadf-1678-408b-beb2-45de1a4435f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.908560] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 94a604da-ad3d-415a-aa92-d648e3da803d/94a604da-ad3d-415a-aa92-d648e3da803d.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1396.909694] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a8032f8-152e-45fc-a667-a764707449aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.926728] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e081bc9e-8e42-4991-9891-901a9a439076 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.942139] env[63241]: DEBUG nova.compute.provider_tree [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.944889] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1396.944889] env[63241]: value = "task-1819967" [ 1396.944889] env[63241]: _type = "Task" [ 1396.944889] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.955745] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819967, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.097192] env[63241]: DEBUG nova.network.neutron [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Successfully created port: af23e4e4-1f35-4054-9d3e-03ba04cc1223 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1397.144307] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1397.144307] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1397.144307] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleting the datastore file [datastore1] 99eccbef-0e76-4532-af2f-5d74e563e1d2 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1397.144307] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d22f4323-3a31-4393-9bb1-0ff920fa1eee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.151758] env[63241]: DEBUG oslo_vmware.api [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1397.151758] env[63241]: value = "task-1819968" [ 1397.151758] env[63241]: _type = "Task" [ 1397.151758] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.160619] env[63241]: DEBUG oslo_vmware.api [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819968, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.186509] env[63241]: DEBUG oslo_concurrency.lockutils [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquiring lock "eb506425-4ecc-44b7-afa4-0901fc60b04f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.186850] env[63241]: DEBUG oslo_concurrency.lockutils [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "eb506425-4ecc-44b7-afa4-0901fc60b04f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.187137] env[63241]: DEBUG oslo_concurrency.lockutils [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquiring lock "eb506425-4ecc-44b7-afa4-0901fc60b04f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.187433] env[63241]: DEBUG oslo_concurrency.lockutils [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "eb506425-4ecc-44b7-afa4-0901fc60b04f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.187608] env[63241]: DEBUG oslo_concurrency.lockutils [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "eb506425-4ecc-44b7-afa4-0901fc60b04f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.191459] env[63241]: INFO nova.compute.manager [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Terminating instance [ 1397.194739] env[63241]: DEBUG nova.compute.manager [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1397.194739] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1397.195759] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6767a553-1a86-4cc8-a9f7-9634a31bf96a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.203485] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1397.203741] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-926dd97b-c9a5-4d49-82e6-441056642627 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.209810] env[63241]: DEBUG oslo_vmware.api [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1397.209810] env[63241]: value = "task-1819969" [ 1397.209810] env[63241]: _type = "Task" [ 1397.209810] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.222842] env[63241]: DEBUG oslo_vmware.api [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819969, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.253539] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819965, 'name': CreateVM_Task, 'duration_secs': 0.428117} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.253582] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.254430] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.254588] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.255033] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1397.255153] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9c8c4a9-f5f1-4d32-97ac-def893f322f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.262534] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1397.262534] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a8b636-a76c-e1ba-fbfa-ab7c52ec9587" [ 1397.262534] env[63241]: _type = "Task" [ 1397.262534] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.271996] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a8b636-a76c-e1ba-fbfa-ab7c52ec9587, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.449293] env[63241]: DEBUG nova.scheduler.client.report [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1397.461532] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819967, 'name': ReconfigVM_Task, 'duration_secs': 0.333489} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.461918] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 94a604da-ad3d-415a-aa92-d648e3da803d/94a604da-ad3d-415a-aa92-d648e3da803d.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1397.462674] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8bf57e7-904b-4a44-ade0-547dbd7d06a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.469229] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1397.469229] env[63241]: value = "task-1819970" [ 1397.469229] env[63241]: _type = "Task" [ 1397.469229] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.479669] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819970, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.666469] env[63241]: DEBUG oslo_vmware.api [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1819968, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210624} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.666692] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1397.666906] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1397.667114] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1397.667287] env[63241]: INFO nova.compute.manager [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Took 0.86 seconds to destroy the instance on the hypervisor. [ 1397.667524] env[63241]: DEBUG oslo.service.loopingcall [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1397.667712] env[63241]: DEBUG nova.compute.manager [-] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1397.668318] env[63241]: DEBUG nova.network.neutron [-] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1397.719350] env[63241]: DEBUG nova.network.neutron [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Updated VIF entry in instance network info cache for port 03370c0c-303d-4511-8cd8-44be5bad305a. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.719701] env[63241]: DEBUG nova.network.neutron [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Updating instance_info_cache with network_info: [{"id": "03370c0c-303d-4511-8cd8-44be5bad305a", "address": "fa:16:3e:4b:9a:9e", "network": {"id": "9442adce-80bf-4a10-8a22-a3b54d25cd68", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1232087337-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bab059b7aff4961a226ef988e125438", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03370c0c-30", "ovs_interfaceid": "03370c0c-303d-4511-8cd8-44be5bad305a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.726891] env[63241]: DEBUG oslo_vmware.api [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819969, 'name': PowerOffVM_Task, 'duration_secs': 0.202273} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.727453] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1397.727637] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1397.727881] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3128c74b-5775-4b5d-b129-d908d3c74645 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.756406] env[63241]: DEBUG nova.compute.manager [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1397.774348] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a8b636-a76c-e1ba-fbfa-ab7c52ec9587, 'name': SearchDatastore_Task, 'duration_secs': 0.024225} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.775421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.775421] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1397.775421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.775421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.775654] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1397.775724] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f350d12-bded-4c85-9e81-55d34830962d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.784250] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1397.784250] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1397.784952] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74b13cee-5aa2-41ae-b5a2-16a853191a08 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.790326] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1397.790587] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1397.790747] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1397.790928] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1397.791093] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1397.791247] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1397.791454] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1397.791611] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea 
tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1397.791778] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1397.791942] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1397.792126] env[63241]: DEBUG nova.virt.hardware [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1397.792924] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc41ea39-6a27-49d7-bc35-ebbe39a2e262 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.799338] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1397.799338] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524805f3-f9b4-87ed-a8d7-7dee2f911479" [ 1397.799338] env[63241]: _type = "Task" [ 1397.799338] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.808261] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d76882-23cf-4a23-a2a5-f51cf2fe7d4d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.827992] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524805f3-f9b4-87ed-a8d7-7dee2f911479, 'name': SearchDatastore_Task, 'duration_secs': 0.010479} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.828788] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58e5feff-2cc7-4cb6-a3b4-252ea3a25b59 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.834629] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1397.834629] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5275c271-a6cb-2875-5a06-d87c1a5dd640" [ 1397.834629] env[63241]: _type = "Task" [ 1397.834629] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.842385] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5275c271-a6cb-2875-5a06-d87c1a5dd640, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.957938] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.234s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.960444] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.113s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.962836] env[63241]: INFO nova.compute.claims [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1397.980153] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819970, 'name': Rename_Task, 'duration_secs': 0.1656} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.980443] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1397.981221] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c54f66ce-9527-40dd-8465-28959eb84526 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.993022] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1397.993022] env[63241]: value = "task-1819972" [ 1397.993022] env[63241]: _type = "Task" [ 1397.993022] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.999911] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819972, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.005766] env[63241]: INFO nova.scheduler.client.report [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Deleted allocations for instance 97890eda-0c1d-4423-acd2-60d3097c6f8a [ 1398.016750] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1398.017298] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1398.017964] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Deleting the datastore file [datastore1] eb506425-4ecc-44b7-afa4-0901fc60b04f {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1398.018418] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9229cdd4-8618-43bf-a445-7b061f88cf9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.025836] env[63241]: DEBUG oslo_vmware.api [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for the task: (returnval){ [ 1398.025836] env[63241]: value = "task-1819973" [ 1398.025836] env[63241]: _type = "Task" [ 1398.025836] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.035803] env[63241]: DEBUG oslo_vmware.api [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819973, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.230493] env[63241]: DEBUG oslo_concurrency.lockutils [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] Releasing lock "refresh_cache-5fce9350-6d45-4bfb-a74b-f5b384ecb16c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.230493] env[63241]: DEBUG nova.compute.manager [req-ff13734c-82e3-4af6-935d-c6a24d0e0e73 req-a400b145-65b9-41c6-ac40-0438480a9c78 service nova] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Received event network-vif-deleted-003d3fab-f7ce-4892-b925-c2280d3a9ae2 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1398.260382] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.260382] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.345462] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5275c271-a6cb-2875-5a06-d87c1a5dd640, 'name': SearchDatastore_Task, 'duration_secs': 0.013943} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.347364] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.347364] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 5fce9350-6d45-4bfb-a74b-f5b384ecb16c/5fce9350-6d45-4bfb-a74b-f5b384ecb16c.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1398.347364] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd129537-6560-449c-9518-dbdb26c407a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.353060] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1398.353060] env[63241]: value = "task-1819974" [ 1398.353060] env[63241]: _type = "Task" [ 1398.353060] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.361478] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.445855] env[63241]: DEBUG nova.network.neutron [-] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.503124] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819972, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.513436] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eca6d2a3-7923-480f-aace-7915f9f5dbdf tempest-ServerPasswordTestJSON-346833765 tempest-ServerPasswordTestJSON-346833765-project-member] Lock "97890eda-0c1d-4423-acd2-60d3097c6f8a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.613s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.538711] env[63241]: DEBUG oslo_vmware.api [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Task: {'id': task-1819973, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17005} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.538941] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1398.539391] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1398.539643] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1398.539867] env[63241]: INFO nova.compute.manager [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1398.540345] env[63241]: DEBUG oslo.service.loopingcall [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1398.540679] env[63241]: DEBUG nova.compute.manager [-] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1398.540747] env[63241]: DEBUG nova.network.neutron [-] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1398.728216] env[63241]: DEBUG nova.compute.manager [req-845ef05e-78c6-40b4-8b1a-790b4d11fef0 req-5937ed65-5d5d-45b6-ba7d-4850b0d73b3e service nova] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Received event network-vif-deleted-c5990c8b-c34c-4221-9e72-567817e9637d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1398.866682] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819974, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.931978] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquiring lock "72a11582-1fad-428a-bde1-e9d0b05731cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.931978] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "72a11582-1fad-428a-bde1-e9d0b05731cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.953166] env[63241]: INFO nova.compute.manager [-] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Took 1.28 seconds to deallocate network for instance. [ 1399.006642] env[63241]: DEBUG oslo_vmware.api [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1819972, 'name': PowerOnVM_Task, 'duration_secs': 0.611852} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.007238] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1399.008204] env[63241]: INFO nova.compute.manager [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Took 9.65 seconds to spawn the instance on the hypervisor. 
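The PowerOnVM_Task records above complete the same invoke/poll/finish cycle that every vCenter operation in this trace follows: a *_Task method is invoked through oslo.vmware, wait_for_task polls it (the "progress is N%." lines from _poll_task), and the entry carrying duration_secs marks completion. Below is a minimal sketch of that cycle against oslo.vmware's public session API; the vCenter host, credentials, and VM reference are placeholders rather than values from this deployment, and the exact constructor arguments should be verified against the installed oslo.vmware release.

# Hedged sketch of the invoke/poll/complete task cycle seen in this log.
# Connection details and the VM managed-object reference are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    host='vc.example.org',                        # placeholder vCenter endpoint
    server_username='administrator@vsphere.local',
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# In Nova the moref comes from PropertyCollector queries, which is what the
# many RetrievePropertiesEx invocations in this log are doing.
vm_ref = ...  # ManagedObjectReference of the target VM (placeholder)

# Invoke PowerOnVM_Task against the vim service and block until the task
# reaches a terminal state; the polling loop is what produces the
# "progress is N%." lines and the final record with duration_secs.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the VM is powered on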
[ 1399.008204] env[63241]: DEBUG nova.compute.manager [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1399.009286] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b126e88f-225c-464f-9ff6-4233a420a838 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.310256] env[63241]: DEBUG nova.network.neutron [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Successfully updated port: af23e4e4-1f35-4054-9d3e-03ba04cc1223 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1399.367876] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614657} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.371550] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 5fce9350-6d45-4bfb-a74b-f5b384ecb16c/5fce9350-6d45-4bfb-a74b-f5b384ecb16c.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1399.371550] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1399.372011] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b624857c-bdbb-4284-8fea-4676cb1ffd35 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.380608] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1399.380608] env[63241]: value = "task-1819975" [ 1399.380608] env[63241]: _type = "Task" [ 1399.380608] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.394766] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819975, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.411875] env[63241]: DEBUG nova.compute.manager [req-9b5beaf1-3d36-4266-90d0-d2bdd03fc2ad req-019507eb-c691-4e70-9411-678c137be2aa service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Received event network-vif-plugged-af23e4e4-1f35-4054-9d3e-03ba04cc1223 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1399.411875] env[63241]: DEBUG oslo_concurrency.lockutils [req-9b5beaf1-3d36-4266-90d0-d2bdd03fc2ad req-019507eb-c691-4e70-9411-678c137be2aa service nova] Acquiring lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.412148] env[63241]: DEBUG oslo_concurrency.lockutils [req-9b5beaf1-3d36-4266-90d0-d2bdd03fc2ad req-019507eb-c691-4e70-9411-678c137be2aa service nova] Lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.412286] env[63241]: DEBUG oslo_concurrency.lockutils [req-9b5beaf1-3d36-4266-90d0-d2bdd03fc2ad req-019507eb-c691-4e70-9411-678c137be2aa service nova] Lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.412448] env[63241]: DEBUG nova.compute.manager [req-9b5beaf1-3d36-4266-90d0-d2bdd03fc2ad req-019507eb-c691-4e70-9411-678c137be2aa service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] No waiting events found dispatching network-vif-plugged-af23e4e4-1f35-4054-9d3e-03ba04cc1223 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1399.412608] env[63241]: WARNING nova.compute.manager [req-9b5beaf1-3d36-4266-90d0-d2bdd03fc2ad req-019507eb-c691-4e70-9411-678c137be2aa service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Received unexpected event network-vif-plugged-af23e4e4-1f35-4054-9d3e-03ba04cc1223 for instance with vm_state building and task_state spawning. [ 1399.459935] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.461024] env[63241]: DEBUG nova.network.neutron [-] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.535344] env[63241]: INFO nova.compute.manager [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Took 37.55 seconds to build instance. 
[ 1399.560787] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae4d890-cea8-4e8f-8bf0-1c80f766b693 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.569732] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d4cb35-20a2-4a35-9c41-1aeaed99360a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.608993] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a6f616-e1ce-42b9-98b0-774c3efb8322 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.619148] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f399a15d-2014-4ce4-b6f8-c7f1d1723742 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.638035] env[63241]: DEBUG nova.compute.provider_tree [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1399.813466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquiring lock "refresh_cache-fe8eaeee-56b2-4974-a448-8f95848b3b3a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.813696] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquired lock "refresh_cache-fe8eaeee-56b2-4974-a448-8f95848b3b3a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.813857] env[63241]: DEBUG nova.network.neutron [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1399.894588] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0697} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.894588] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1399.896044] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2866f3-6570-4f3b-b0d4-3527eb3ce058 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.919307] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 5fce9350-6d45-4bfb-a74b-f5b384ecb16c/5fce9350-6d45-4bfb-a74b-f5b384ecb16c.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1399.919628] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cf14a3a-b052-43b5-8a38-bbb8d1dbab8f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.940460] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1399.940460] env[63241]: value = "task-1819976" [ 1399.940460] env[63241]: _type = "Task" [ 1399.940460] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.948847] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819976, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.964251] env[63241]: INFO nova.compute.manager [-] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Took 1.42 seconds to deallocate network for instance. 
[ 1400.041192] env[63241]: DEBUG oslo_concurrency.lockutils [None req-60540948-79be-4336-9dff-979afebb4169 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "94a604da-ad3d-415a-aa92-d648e3da803d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.112s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.169791] env[63241]: ERROR nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [req-621fb33b-a08f-405b-ac95-98d2de7203f4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-621fb33b-a08f-405b-ac95-98d2de7203f4"}]} [ 1400.192031] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1400.212278] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1400.212878] env[63241]: DEBUG nova.compute.provider_tree [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1400.231863] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 
tempest-AttachInterfacesV270Test-1418699301-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1400.254515] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1400.384878] env[63241]: DEBUG nova.network.neutron [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1400.453786] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819976, 'name': ReconfigVM_Task, 'duration_secs': 0.297274} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.454125] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 5fce9350-6d45-4bfb-a74b-f5b384ecb16c/5fce9350-6d45-4bfb-a74b-f5b384ecb16c.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1400.457152] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a77f0c47-5871-4869-afe3-ff38a0e95a85 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.463952] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1400.463952] env[63241]: value = "task-1819977" [ 1400.463952] env[63241]: _type = "Task" [ 1400.463952] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.475542] env[63241]: DEBUG oslo_concurrency.lockutils [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.476423] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819977, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.546677] env[63241]: DEBUG nova.compute.manager [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1400.668548] env[63241]: DEBUG nova.network.neutron [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Updating instance_info_cache with network_info: [{"id": "af23e4e4-1f35-4054-9d3e-03ba04cc1223", "address": "fa:16:3e:e2:65:5b", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf23e4e4-1f", "ovs_interfaceid": "af23e4e4-1f35-4054-9d3e-03ba04cc1223", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.851018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2dc8521-c605-44fd-a210-ea28d07b5442 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.863478] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0375ba6a-757a-4278-9e72-086610de851f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.899489] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb458dd7-2705-4f39-be5d-6fc59772c5d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.907788] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04599fdb-43d6-46f2-bb7d-8160f7434c7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.923395] env[63241]: DEBUG nova.compute.provider_tree [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1400.977869] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819977, 'name': Rename_Task, 'duration_secs': 0.149917} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.978198] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.978434] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81728a15-818b-4614-ad8c-c088adde4415 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.988811] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1400.988811] env[63241]: value = "task-1819978" [ 1400.988811] env[63241]: _type = "Task" [ 1400.988811] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.002398] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1819978, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.060839] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fbf492b3-3e71-4116-a955-56869488bc24 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "f72a1045-1404-4a4c-82da-b452ea9429d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.061136] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fbf492b3-3e71-4116-a955-56869488bc24 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "f72a1045-1404-4a4c-82da-b452ea9429d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.082824] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.171896] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Releasing lock "refresh_cache-fe8eaeee-56b2-4974-a448-8f95848b3b3a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.172397] env[63241]: DEBUG nova.compute.manager [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Instance network_info: |[{"id": "af23e4e4-1f35-4054-9d3e-03ba04cc1223", "address": "fa:16:3e:e2:65:5b", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf23e4e4-1f", "ovs_interfaceid": "af23e4e4-1f35-4054-9d3e-03ba04cc1223", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1401.172970] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: 
fe8eaeee-56b2-4974-a448-8f95848b3b3a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:65:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af23e4e4-1f35-4054-9d3e-03ba04cc1223', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1401.181195] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Creating folder: Project (43ef918430594de39503f9fbd65095b6). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1401.181837] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a31d6319-97e6-4234-8060-71cea4b13dab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.194807] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Created folder: Project (43ef918430594de39503f9fbd65095b6) in parent group-v376927. [ 1401.194807] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Creating folder: Instances. Parent ref: group-v376993. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1401.194807] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-105e8a74-a2d8-4ee6-a5d1-86c1c96e51ad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.206177] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Created folder: Instances in parent group-v376993. [ 1401.206793] env[63241]: DEBUG oslo.service.loopingcall [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1401.206793] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1401.206897] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9248bc27-5790-4a4a-84f0-7126a811dd9d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.226606] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1401.226606] env[63241]: value = "task-1819981" [ 1401.226606] env[63241]: _type = "Task" [ 1401.226606] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.235293] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819981, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.449696] env[63241]: ERROR nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [req-b9dd4d05-6494-40b1-ba20-33a62e00376a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b9dd4d05-6494-40b1-ba20-33a62e00376a"}]} [ 1401.472054] env[63241]: DEBUG nova.compute.manager [req-71feba24-e973-46b9-9afd-2e1ea3cf22ff req-db90b370-bb87-49f8-a4cb-458127fb7360 service nova] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Received event network-vif-deleted-cd1c9913-c7c6-4258-9006-ee7987594482 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1401.474637] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1401.498024] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1401.498024] env[63241]: DEBUG nova.compute.provider_tree [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1401.511883] env[63241]: DEBUG oslo_vmware.api [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] 
Task: {'id': task-1819978, 'name': PowerOnVM_Task, 'duration_secs': 0.487305} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.513322] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1401.515474] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquiring lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.515693] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.515896] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.516112] env[63241]: INFO nova.compute.manager [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Took 8.52 seconds to spawn the instance on the hypervisor. 
[ 1401.516321] env[63241]: DEBUG nova.compute.manager [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1401.518086] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177b323c-d3a4-4fdb-bae2-ae491560cf4c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.540337] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1401.681488] env[63241]: DEBUG nova.compute.manager [req-34d9cb2d-02c0-4837-9664-68f14e7f67f7 req-323fd58f-2889-4f2d-9594-9821ca44373b service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Received event network-changed-af23e4e4-1f35-4054-9d3e-03ba04cc1223 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1401.681726] env[63241]: DEBUG nova.compute.manager [req-34d9cb2d-02c0-4837-9664-68f14e7f67f7 req-323fd58f-2889-4f2d-9594-9821ca44373b service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Refreshing instance network info cache due to event network-changed-af23e4e4-1f35-4054-9d3e-03ba04cc1223. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1401.681890] env[63241]: DEBUG oslo_concurrency.lockutils [req-34d9cb2d-02c0-4837-9664-68f14e7f67f7 req-323fd58f-2889-4f2d-9594-9821ca44373b service nova] Acquiring lock "refresh_cache-fe8eaeee-56b2-4974-a448-8f95848b3b3a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.682634] env[63241]: DEBUG oslo_concurrency.lockutils [req-34d9cb2d-02c0-4837-9664-68f14e7f67f7 req-323fd58f-2889-4f2d-9594-9821ca44373b service nova] Acquired lock "refresh_cache-fe8eaeee-56b2-4974-a448-8f95848b3b3a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.682937] env[63241]: DEBUG nova.network.neutron [req-34d9cb2d-02c0-4837-9664-68f14e7f67f7 req-323fd58f-2889-4f2d-9594-9821ca44373b service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Refreshing network info cache for port af23e4e4-1f35-4054-9d3e-03ba04cc1223 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1401.737863] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819981, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.049081] env[63241]: INFO nova.compute.manager [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Took 39.19 seconds to build instance. 
[ 1402.167228] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493516c1-92c8-49a9-8ee6-381dc553b009 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.175863] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8546a7-0ae1-45fb-967c-ef4dc439bffc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.216285] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d50a802-b9d3-436d-83f2-b2e388b14ecc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.224035] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec8a568-551a-495d-b152-f0dca66a4477 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.244408] env[63241]: DEBUG nova.compute.provider_tree [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1402.249164] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819981, 'name': CreateVM_Task, 'duration_secs': 0.652868} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.251786] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1402.251786] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.251786] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.251786] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1402.252388] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa369c1f-ba11-4fce-95f2-97e607e8f419 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.257877] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1402.257877] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b1ca7a-7af1-fb54-8842-f48a145bc9bc" [ 1402.257877] env[63241]: _type = "Task" [ 1402.257877] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.268834] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b1ca7a-7af1-fb54-8842-f48a145bc9bc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.551824] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d516dc0-59bd-4c26-b2ed-1b08b4f73300 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.011s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.583108] env[63241]: DEBUG nova.network.neutron [req-34d9cb2d-02c0-4837-9664-68f14e7f67f7 req-323fd58f-2889-4f2d-9594-9821ca44373b service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Updated VIF entry in instance network info cache for port af23e4e4-1f35-4054-9d3e-03ba04cc1223. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1402.583896] env[63241]: DEBUG nova.network.neutron [req-34d9cb2d-02c0-4837-9664-68f14e7f67f7 req-323fd58f-2889-4f2d-9594-9821ca44373b service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Updating instance_info_cache with network_info: [{"id": "af23e4e4-1f35-4054-9d3e-03ba04cc1223", "address": "fa:16:3e:e2:65:5b", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf23e4e4-1f", "ovs_interfaceid": "af23e4e4-1f35-4054-9d3e-03ba04cc1223", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.771636] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b1ca7a-7af1-fb54-8842-f48a145bc9bc, 'name': SearchDatastore_Task, 'duration_secs': 0.019566} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.772751] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.772751] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1402.772751] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.772751] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.773039] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1402.773620] env[63241]: ERROR nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [req-99806d80-564a-4149-b080-6ffe88b3fb39] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-99806d80-564a-4149-b080-6ffe88b3fb39"}]} [ 1402.774078] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb538a89-d6c0-42d2-8712-18b547f84966 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.790341] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1402.790341] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1402.790815] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98a57464-0ea7-44a5-a128-5e44cb6c5e05 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.798989] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1402.803951] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1402.803951] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525db244-5f8f-edcb-f0f7-72c2bf6f09c7" [ 1402.803951] env[63241]: _type = "Task" [ 1402.803951] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.812659] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525db244-5f8f-edcb-f0f7-72c2bf6f09c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.815265] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1402.815497] env[63241]: DEBUG nova.compute.provider_tree [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1402.832865] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1402.854670] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1403.042290] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "5203c12e-14a0-4736-8185-8ead9a29b03b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.042290] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "5203c12e-14a0-4736-8185-8ead9a29b03b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.055085] env[63241]: DEBUG nova.compute.manager 
[None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1403.086366] env[63241]: DEBUG oslo_concurrency.lockutils [req-34d9cb2d-02c0-4837-9664-68f14e7f67f7 req-323fd58f-2889-4f2d-9594-9821ca44373b service nova] Releasing lock "refresh_cache-fe8eaeee-56b2-4974-a448-8f95848b3b3a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.319257] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525db244-5f8f-edcb-f0f7-72c2bf6f09c7, 'name': SearchDatastore_Task, 'duration_secs': 0.022766} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.320147] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2c1d77d-bfbe-4a74-9391-fd7de6edf0a2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.329151] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1403.329151] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d54b6c-1bcb-30f3-482b-b2cd241574dd" [ 1403.329151] env[63241]: _type = "Task" [ 1403.329151] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.339304] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d54b6c-1bcb-30f3-482b-b2cd241574dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.482772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14b959a-fa0a-4510-bc71-f8d355b98b60 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.491541] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ebd3d8-59d7-4b5a-8698-0081a2693c44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.526298] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce933b0e-24f5-4e19-a8ad-a5b895eca129 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.530570] env[63241]: DEBUG nova.compute.manager [req-9564709e-eb24-4487-b1e3-00bda97c7c84 req-29742caa-67d0-469c-8a81-46e43b01d15d service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Received event network-changed-03370c0c-303d-4511-8cd8-44be5bad305a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1403.530758] env[63241]: DEBUG nova.compute.manager [req-9564709e-eb24-4487-b1e3-00bda97c7c84 req-29742caa-67d0-469c-8a81-46e43b01d15d service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Refreshing instance network info cache due to event network-changed-03370c0c-303d-4511-8cd8-44be5bad305a. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1403.530964] env[63241]: DEBUG oslo_concurrency.lockutils [req-9564709e-eb24-4487-b1e3-00bda97c7c84 req-29742caa-67d0-469c-8a81-46e43b01d15d service nova] Acquiring lock "refresh_cache-5fce9350-6d45-4bfb-a74b-f5b384ecb16c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.531118] env[63241]: DEBUG oslo_concurrency.lockutils [req-9564709e-eb24-4487-b1e3-00bda97c7c84 req-29742caa-67d0-469c-8a81-46e43b01d15d service nova] Acquired lock "refresh_cache-5fce9350-6d45-4bfb-a74b-f5b384ecb16c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.531329] env[63241]: DEBUG nova.network.neutron [req-9564709e-eb24-4487-b1e3-00bda97c7c84 req-29742caa-67d0-469c-8a81-46e43b01d15d service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Refreshing network info cache for port 03370c0c-303d-4511-8cd8-44be5bad305a {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1403.538830] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc53525-6e89-4f11-8b37-39d5f26f5b31 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.553243] env[63241]: DEBUG nova.compute.provider_tree [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1403.589382] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.840909] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d54b6c-1bcb-30f3-482b-b2cd241574dd, 'name': SearchDatastore_Task, 'duration_secs': 0.016636} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.840909] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.841221] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] fe8eaeee-56b2-4974-a448-8f95848b3b3a/fe8eaeee-56b2-4974-a448-8f95848b3b3a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1403.841836] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ae28dfd-b1ee-4bd7-add5-ad4b424e7bb6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.850680] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1403.850680] env[63241]: value = "task-1819985" [ 1403.850680] env[63241]: _type = "Task" [ 1403.850680] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.859696] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819985, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.099184] env[63241]: DEBUG nova.scheduler.client.report [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 50 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1404.099493] env[63241]: DEBUG nova.compute.provider_tree [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 50 to 51 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1404.099729] env[63241]: DEBUG nova.compute.provider_tree [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1404.364713] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497347} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.364976] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] fe8eaeee-56b2-4974-a448-8f95848b3b3a/fe8eaeee-56b2-4974-a448-8f95848b3b3a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1404.366028] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1404.368159] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b7587bb-26fe-4849-ac08-8a2ed5cac03c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.376030] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1404.376030] env[63241]: value = "task-1819986" [ 1404.376030] env[63241]: _type = "Task" [ 1404.376030] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.388990] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819986, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.440484] env[63241]: DEBUG nova.network.neutron [req-9564709e-eb24-4487-b1e3-00bda97c7c84 req-29742caa-67d0-469c-8a81-46e43b01d15d service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Updated VIF entry in instance network info cache for port 03370c0c-303d-4511-8cd8-44be5bad305a. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1404.440818] env[63241]: DEBUG nova.network.neutron [req-9564709e-eb24-4487-b1e3-00bda97c7c84 req-29742caa-67d0-469c-8a81-46e43b01d15d service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Updating instance_info_cache with network_info: [{"id": "03370c0c-303d-4511-8cd8-44be5bad305a", "address": "fa:16:3e:4b:9a:9e", "network": {"id": "9442adce-80bf-4a10-8a22-a3b54d25cd68", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1232087337-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bab059b7aff4961a226ef988e125438", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03370c0c-30", "ovs_interfaceid": "03370c0c-303d-4511-8cd8-44be5bad305a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.606180] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.645s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.606767] env[63241]: DEBUG nova.compute.manager [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1404.609416] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.254s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.610806] env[63241]: INFO nova.compute.claims [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1404.892053] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.178657} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.892053] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1404.892053] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5649358b-d53f-4e12-b78f-3db88aceed3d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.912564] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] fe8eaeee-56b2-4974-a448-8f95848b3b3a/fe8eaeee-56b2-4974-a448-8f95848b3b3a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1404.913202] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a436104-6c20-4cd1-bf37-3029229d5013 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.937022] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1404.937022] env[63241]: value = "task-1819987" [ 1404.937022] env[63241]: _type = "Task" [ 1404.937022] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.949549] env[63241]: DEBUG oslo_concurrency.lockutils [req-9564709e-eb24-4487-b1e3-00bda97c7c84 req-29742caa-67d0-469c-8a81-46e43b01d15d service nova] Releasing lock "refresh_cache-5fce9350-6d45-4bfb-a74b-f5b384ecb16c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.950709] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819987, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.115688] env[63241]: DEBUG nova.compute.utils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1405.119148] env[63241]: DEBUG nova.compute.manager [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1405.119367] env[63241]: DEBUG nova.network.neutron [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1405.171142] env[63241]: DEBUG nova.policy [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '365c48bb1ce14b21a6f7f2085e1c6930', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3681ee52c08b44128ad4b05f19c23cc8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1405.449624] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819987, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.468443] env[63241]: DEBUG nova.network.neutron [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Successfully created port: 985a0245-a31d-4625-9329-9eccd849fb4d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1405.621922] env[63241]: DEBUG nova.compute.manager [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1405.945898] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819987, 'name': ReconfigVM_Task, 'duration_secs': 0.995965} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.946030] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Reconfigured VM instance instance-00000015 to attach disk [datastore1] fe8eaeee-56b2-4974-a448-8f95848b3b3a/fe8eaeee-56b2-4974-a448-8f95848b3b3a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1405.946650] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51b55839-fea9-456a-a5d1-084664dbada1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.957155] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1405.957155] env[63241]: value = "task-1819988" [ 1405.957155] env[63241]: _type = "Task" [ 1405.957155] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.970258] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819988, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.277972] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682f77aa-5b54-443e-994c-d65b50c1e092 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.284256] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97892d3-9243-44ed-b9a9-26d724a05d36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.321621] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b123f23b-ee06-4c6f-a9a2-34b5582daebc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.343509] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc0ebea-0b56-4537-b2b0-8a1756fb522f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.370949] env[63241]: DEBUG nova.compute.provider_tree [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1406.472021] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819988, 'name': Rename_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.640708] env[63241]: DEBUG nova.compute.manager [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1406.662123] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1406.662443] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1406.662523] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1406.662708] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1406.662900] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1406.663105] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1406.663345] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1406.663502] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1406.663671] 
env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1406.663949] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1406.663988] env[63241]: DEBUG nova.virt.hardware [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1406.664859] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eef4c94-fbdf-45e1-a30b-cab028c93133 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.675668] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8f3a20-0c5f-4c24-8fe8-588b2fcd3b36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.912654] env[63241]: DEBUG nova.scheduler.client.report [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 51 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1406.912933] env[63241]: DEBUG nova.compute.provider_tree [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 51 to 52 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1406.913165] env[63241]: DEBUG nova.compute.provider_tree [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1406.969127] 
env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819988, 'name': Rename_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.233013] env[63241]: DEBUG nova.compute.manager [req-c751e3d7-12c9-4af0-8b22-c859f0d44c1a req-77c90a5e-137c-4a3b-962c-2661876aefb4 service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Received event network-vif-plugged-985a0245-a31d-4625-9329-9eccd849fb4d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1407.233305] env[63241]: DEBUG oslo_concurrency.lockutils [req-c751e3d7-12c9-4af0-8b22-c859f0d44c1a req-77c90a5e-137c-4a3b-962c-2661876aefb4 service nova] Acquiring lock "41182989-2537-42f0-8c37-792b8b2c5206-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.233518] env[63241]: DEBUG oslo_concurrency.lockutils [req-c751e3d7-12c9-4af0-8b22-c859f0d44c1a req-77c90a5e-137c-4a3b-962c-2661876aefb4 service nova] Lock "41182989-2537-42f0-8c37-792b8b2c5206-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.233685] env[63241]: DEBUG oslo_concurrency.lockutils [req-c751e3d7-12c9-4af0-8b22-c859f0d44c1a req-77c90a5e-137c-4a3b-962c-2661876aefb4 service nova] Lock "41182989-2537-42f0-8c37-792b8b2c5206-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.233853] env[63241]: DEBUG nova.compute.manager [req-c751e3d7-12c9-4af0-8b22-c859f0d44c1a req-77c90a5e-137c-4a3b-962c-2661876aefb4 service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] No waiting events found dispatching network-vif-plugged-985a0245-a31d-4625-9329-9eccd849fb4d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1407.234370] env[63241]: WARNING nova.compute.manager [req-c751e3d7-12c9-4af0-8b22-c859f0d44c1a req-77c90a5e-137c-4a3b-962c-2661876aefb4 service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Received unexpected event network-vif-plugged-985a0245-a31d-4625-9329-9eccd849fb4d for instance with vm_state building and task_state spawning. 
[ 1407.371108] env[63241]: DEBUG nova.network.neutron [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Successfully updated port: 985a0245-a31d-4625-9329-9eccd849fb4d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1407.425343] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.816s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.425809] env[63241]: DEBUG nova.compute.manager [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1407.428814] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.473s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.433063] env[63241]: INFO nova.compute.claims [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1407.471950] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819988, 'name': Rename_Task, 'duration_secs': 1.279482} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.472337] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1407.472620] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-185bf127-043a-41c1-875a-1c13216f9b7e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.479164] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1407.479164] env[63241]: value = "task-1819990" [ 1407.479164] env[63241]: _type = "Task" [ 1407.479164] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.489598] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819990, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.548300] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.548632] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.874552] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquiring lock "refresh_cache-41182989-2537-42f0-8c37-792b8b2c5206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.874552] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquired lock "refresh_cache-41182989-2537-42f0-8c37-792b8b2c5206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.874552] env[63241]: DEBUG nova.network.neutron [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1407.937025] env[63241]: DEBUG nova.compute.utils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1407.939376] env[63241]: DEBUG nova.compute.manager [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1407.939714] env[63241]: DEBUG nova.network.neutron [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1407.991708] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819990, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.021904] env[63241]: DEBUG nova.policy [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ecfeb57e9ebb4929867e7b4de8716c6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6a96e3f7eb649ffb15fd6a19a98fdd4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1408.411892] env[63241]: DEBUG nova.network.neutron [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1408.443061] env[63241]: DEBUG nova.compute.manager [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1408.496280] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819990, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.607036] env[63241]: DEBUG nova.network.neutron [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Updating instance_info_cache with network_info: [{"id": "985a0245-a31d-4625-9329-9eccd849fb4d", "address": "fa:16:3e:57:3d:35", "network": {"id": "c60c5870-cde6-4a59-ba48-5308b3e604ec", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2037452429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3681ee52c08b44128ad4b05f19c23cc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b91b49a8-b849-4d0c-97f7-74fdcd88ae03", "external-id": "nsx-vlan-transportzone-406", "segmentation_id": 406, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap985a0245-a3", "ovs_interfaceid": "985a0245-a31d-4625-9329-9eccd849fb4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.766035] env[63241]: DEBUG nova.network.neutron [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Successfully created port: 79c31016-2c0a-49c7-bfd0-bbed6734219c {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1408.997106] env[63241]: DEBUG oslo_vmware.api [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819990, 'name': PowerOnVM_Task, 'duration_secs': 1.056791} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.997338] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1408.997413] env[63241]: INFO nova.compute.manager [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Took 11.24 seconds to spawn the instance on the hypervisor. 
[ 1408.998032] env[63241]: DEBUG nova.compute.manager [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1408.998772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be45fc7-da18-4c20-ae8e-1782222d5a33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.003785] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570da891-7f8a-4448-9121-26a906366c43 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.015878] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8699b3b-ef88-4aea-a7ad-ee81585d8fd4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.052209] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180add8a-aeca-4f73-8136-b7dc59daba2e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.058052] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25acc1e0-e384-463a-970b-c9151b4c76b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.073815] env[63241]: DEBUG nova.compute.provider_tree [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1409.110508] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Releasing lock "refresh_cache-41182989-2537-42f0-8c37-792b8b2c5206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.110830] env[63241]: DEBUG nova.compute.manager [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Instance network_info: |[{"id": "985a0245-a31d-4625-9329-9eccd849fb4d", "address": "fa:16:3e:57:3d:35", "network": {"id": "c60c5870-cde6-4a59-ba48-5308b3e604ec", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2037452429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3681ee52c08b44128ad4b05f19c23cc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"b91b49a8-b849-4d0c-97f7-74fdcd88ae03", "external-id": "nsx-vlan-transportzone-406", "segmentation_id": 406, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap985a0245-a3", "ovs_interfaceid": "985a0245-a31d-4625-9329-9eccd849fb4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1409.111254] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:3d:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b91b49a8-b849-4d0c-97f7-74fdcd88ae03', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '985a0245-a31d-4625-9329-9eccd849fb4d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1409.119233] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Creating folder: Project (3681ee52c08b44128ad4b05f19c23cc8). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1409.119871] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a2a2dd2-4b44-4bda-832c-0eb1492d0a70 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.131934] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Created folder: Project (3681ee52c08b44128ad4b05f19c23cc8) in parent group-v376927. [ 1409.131934] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Creating folder: Instances. Parent ref: group-v376999. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1409.132060] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dff30ca9-dc84-4ebc-b852-85d3f8828912 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.140893] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Created folder: Instances in parent group-v376999. [ 1409.141153] env[63241]: DEBUG oslo.service.loopingcall [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1409.141340] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1409.141543] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2d71be4-9973-4271-b70d-261c0b265de3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.173523] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1409.173523] env[63241]: value = "task-1819994" [ 1409.173523] env[63241]: _type = "Task" [ 1409.173523] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.182465] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819994, 'name': CreateVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.267048] env[63241]: DEBUG nova.compute.manager [req-25b7caf4-dc87-4853-8828-01d20b68a8d7 req-a8dc6c1a-d57c-4a76-8493-c2d9541b8b53 service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Received event network-changed-985a0245-a31d-4625-9329-9eccd849fb4d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1409.267271] env[63241]: DEBUG nova.compute.manager [req-25b7caf4-dc87-4853-8828-01d20b68a8d7 req-a8dc6c1a-d57c-4a76-8493-c2d9541b8b53 service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Refreshing instance network info cache due to event network-changed-985a0245-a31d-4625-9329-9eccd849fb4d. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1409.267512] env[63241]: DEBUG oslo_concurrency.lockutils [req-25b7caf4-dc87-4853-8828-01d20b68a8d7 req-a8dc6c1a-d57c-4a76-8493-c2d9541b8b53 service nova] Acquiring lock "refresh_cache-41182989-2537-42f0-8c37-792b8b2c5206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.268443] env[63241]: DEBUG oslo_concurrency.lockutils [req-25b7caf4-dc87-4853-8828-01d20b68a8d7 req-a8dc6c1a-d57c-4a76-8493-c2d9541b8b53 service nova] Acquired lock "refresh_cache-41182989-2537-42f0-8c37-792b8b2c5206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.268443] env[63241]: DEBUG nova.network.neutron [req-25b7caf4-dc87-4853-8828-01d20b68a8d7 req-a8dc6c1a-d57c-4a76-8493-c2d9541b8b53 service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Refreshing network info cache for port 985a0245-a31d-4625-9329-9eccd849fb4d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1409.458898] env[63241]: DEBUG nova.compute.manager [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1409.491759] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1409.492198] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1409.492198] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1409.492384] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1409.492531] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1409.492676] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1409.493475] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1409.493475] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1409.493475] 
env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1409.493475] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1409.493713] env[63241]: DEBUG nova.virt.hardware [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1409.497061] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d936eaa-97e8-4b63-8823-51d7276d3aa0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.503923] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dcaaed7-ef00-4a56-8ebd-3411e2cd07f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.524160] env[63241]: INFO nova.compute.manager [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Took 37.37 seconds to build instance. [ 1409.578700] env[63241]: DEBUG nova.scheduler.client.report [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1409.684098] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1819994, 'name': CreateVM_Task, 'duration_secs': 0.402657} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.684302] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1409.684978] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.685171] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.685527] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1409.685783] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28c007dc-e343-47ca-b08a-bccc0fd62ac8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.690531] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1409.690531] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52344986-2b33-cc67-7a6a-8046de4938b0" [ 1409.690531] env[63241]: _type = "Task" [ 1409.690531] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.698043] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52344986-2b33-cc67-7a6a-8046de4938b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.027055] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6ca4e0e2-8f50-464a-af29-f70bf328cbea tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.962s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.045470] env[63241]: DEBUG nova.network.neutron [req-25b7caf4-dc87-4853-8828-01d20b68a8d7 req-a8dc6c1a-d57c-4a76-8493-c2d9541b8b53 service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Updated VIF entry in instance network info cache for port 985a0245-a31d-4625-9329-9eccd849fb4d. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1410.045470] env[63241]: DEBUG nova.network.neutron [req-25b7caf4-dc87-4853-8828-01d20b68a8d7 req-a8dc6c1a-d57c-4a76-8493-c2d9541b8b53 service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Updating instance_info_cache with network_info: [{"id": "985a0245-a31d-4625-9329-9eccd849fb4d", "address": "fa:16:3e:57:3d:35", "network": {"id": "c60c5870-cde6-4a59-ba48-5308b3e604ec", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2037452429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3681ee52c08b44128ad4b05f19c23cc8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b91b49a8-b849-4d0c-97f7-74fdcd88ae03", "external-id": "nsx-vlan-transportzone-406", "segmentation_id": 406, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap985a0245-a3", "ovs_interfaceid": "985a0245-a31d-4625-9329-9eccd849fb4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.083924] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.655s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.084500] env[63241]: DEBUG nova.compute.manager [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1410.087638] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.937s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.089070] env[63241]: INFO nova.compute.claims [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1410.208482] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52344986-2b33-cc67-7a6a-8046de4938b0, 'name': SearchDatastore_Task, 'duration_secs': 0.011148} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.208793] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.209034] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1410.209274] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.209548] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.209583] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1410.209816] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1bd1fef-c896-493f-a389-3e3602204f49 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.219919] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1410.220116] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1410.220928] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c85f846-2d29-44d0-b4f8-84e31a2129bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.226085] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1410.226085] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e1b742-b965-a329-bcb2-d641b745685a" [ 1410.226085] env[63241]: _type = "Task" [ 1410.226085] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.233384] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e1b742-b965-a329-bcb2-d641b745685a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.402812] env[63241]: DEBUG nova.compute.manager [None req-92fedf39-79ff-48fc-99bd-116d8b654496 tempest-ServerDiagnosticsTest-533931434 tempest-ServerDiagnosticsTest-533931434-project-admin] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1410.403953] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f0f556-c341-4628-86dd-d5d1fb57c502 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.412165] env[63241]: INFO nova.compute.manager [None req-92fedf39-79ff-48fc-99bd-116d8b654496 tempest-ServerDiagnosticsTest-533931434 tempest-ServerDiagnosticsTest-533931434-project-admin] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Retrieving diagnostics [ 1410.413388] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36055a0-6781-4a72-9abe-9e651dd979b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.465660] env[63241]: DEBUG nova.network.neutron [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Successfully updated port: 79c31016-2c0a-49c7-bfd0-bbed6734219c {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1410.529935] env[63241]: DEBUG nova.compute.manager [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1410.548198] env[63241]: DEBUG oslo_concurrency.lockutils [req-25b7caf4-dc87-4853-8828-01d20b68a8d7 req-a8dc6c1a-d57c-4a76-8493-c2d9541b8b53 service nova] Releasing lock "refresh_cache-41182989-2537-42f0-8c37-792b8b2c5206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.597435] env[63241]: DEBUG nova.compute.utils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1410.601687] env[63241]: DEBUG nova.compute.manager [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1410.601862] env[63241]: DEBUG nova.network.neutron [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1410.689132] env[63241]: DEBUG nova.policy [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1328b5860e6490aa44a9bd45f747ddf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58d423eaebec4695928cb0d2b698bbfd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1410.737288] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e1b742-b965-a329-bcb2-d641b745685a, 'name': SearchDatastore_Task, 'duration_secs': 0.034589} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.738367] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59abbb9f-866c-44af-93a8-5f9be35da662 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.744854] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1410.744854] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525be5a5-ef85-e7a4-38b2-a0ab4825cb38" [ 1410.744854] env[63241]: _type = "Task" [ 1410.744854] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.753572] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525be5a5-ef85-e7a4-38b2-a0ab4825cb38, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.965587] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "refresh_cache-0115b03b-c828-4e8b-a4d2-c98f8ca69c66" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.965587] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquired lock "refresh_cache-0115b03b-c828-4e8b-a4d2-c98f8ca69c66" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.965587] env[63241]: DEBUG nova.network.neutron [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1411.016890] env[63241]: DEBUG nova.network.neutron [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Successfully created port: 047a4f6e-0c89-41ce-ab72-9a6d521031c4 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1411.032880] env[63241]: DEBUG nova.network.neutron [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1411.060850] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.102797] env[63241]: DEBUG nova.compute.manager [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1411.208818] env[63241]: DEBUG nova.network.neutron [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Updating instance_info_cache with network_info: [{"id": "79c31016-2c0a-49c7-bfd0-bbed6734219c", "address": "fa:16:3e:78:b7:4f", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c31016-2c", "ovs_interfaceid": "79c31016-2c0a-49c7-bfd0-bbed6734219c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.255471] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525be5a5-ef85-e7a4-38b2-a0ab4825cb38, 'name': SearchDatastore_Task, 'duration_secs': 0.009646} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.258225] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.258542] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 41182989-2537-42f0-8c37-792b8b2c5206/41182989-2537-42f0-8c37-792b8b2c5206.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1411.260392] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1016faac-97f4-4fee-b52f-0caa9c16bc16 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.265629] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1411.265629] env[63241]: value = "task-1819996" [ 1411.265629] env[63241]: _type = "Task" [ 1411.265629] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.276647] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1819996, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.382214] env[63241]: DEBUG nova.compute.manager [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Received event network-vif-plugged-79c31016-2c0a-49c7-bfd0-bbed6734219c {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1411.382214] env[63241]: DEBUG oslo_concurrency.lockutils [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] Acquiring lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.382214] env[63241]: DEBUG oslo_concurrency.lockutils [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] Lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.382214] env[63241]: DEBUG oslo_concurrency.lockutils [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] Lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.382214] env[63241]: DEBUG nova.compute.manager [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] No waiting events found dispatching network-vif-plugged-79c31016-2c0a-49c7-bfd0-bbed6734219c {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1411.382416] env[63241]: WARNING nova.compute.manager [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Received unexpected event network-vif-plugged-79c31016-2c0a-49c7-bfd0-bbed6734219c for instance with vm_state building and task_state spawning. [ 1411.382416] env[63241]: DEBUG nova.compute.manager [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Received event network-changed-79c31016-2c0a-49c7-bfd0-bbed6734219c {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1411.382533] env[63241]: DEBUG nova.compute.manager [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Refreshing instance network info cache due to event network-changed-79c31016-2c0a-49c7-bfd0-bbed6734219c. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1411.382716] env[63241]: DEBUG oslo_concurrency.lockutils [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] Acquiring lock "refresh_cache-0115b03b-c828-4e8b-a4d2-c98f8ca69c66" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.561117] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquiring lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.561401] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.561623] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquiring lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.561808] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.561973] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.567531] env[63241]: INFO nova.compute.manager [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Terminating instance [ 1411.570156] env[63241]: DEBUG nova.compute.manager [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1411.570382] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1411.571384] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20e01a5-57b3-45c9-9dc0-885e58e2ec4e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.579359] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1411.579661] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1736e4ac-364a-41d3-ba87-0b67f9b99257 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.591686] env[63241]: DEBUG oslo_vmware.api [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1411.591686] env[63241]: value = "task-1819997" [ 1411.591686] env[63241]: _type = "Task" [ 1411.591686] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.605156] env[63241]: DEBUG oslo_vmware.api [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819997, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.650470] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6439f22-8ef0-4937-8c26-67c330b2a3e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.659537] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ee7760-5d09-4cfe-a305-64ae5821a5d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.691797] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5213f767-a5bf-478d-b437-a1128f3b1386 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.701290] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1934f65-d73a-4719-90b7-9a4a1ad69ba9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.717053] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Releasing lock "refresh_cache-0115b03b-c828-4e8b-a4d2-c98f8ca69c66" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.717053] env[63241]: DEBUG nova.compute.manager [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Instance network_info: |[{"id": "79c31016-2c0a-49c7-bfd0-bbed6734219c", "address": "fa:16:3e:78:b7:4f", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c31016-2c", "ovs_interfaceid": "79c31016-2c0a-49c7-bfd0-bbed6734219c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1411.717431] env[63241]: DEBUG nova.compute.provider_tree [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.718919] env[63241]: DEBUG oslo_concurrency.lockutils 
[req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] Acquired lock "refresh_cache-0115b03b-c828-4e8b-a4d2-c98f8ca69c66" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.719122] env[63241]: DEBUG nova.network.neutron [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Refreshing network info cache for port 79c31016-2c0a-49c7-bfd0-bbed6734219c {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1411.720366] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:b7:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79c31016-2c0a-49c7-bfd0-bbed6734219c', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1411.728195] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Creating folder: Project (c6a96e3f7eb649ffb15fd6a19a98fdd4). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1411.730368] env[63241]: DEBUG nova.scheduler.client.report [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1411.733597] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-748bda7e-2359-457f-be6f-db01e34c0ec2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.744690] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Created folder: Project (c6a96e3f7eb649ffb15fd6a19a98fdd4) in parent group-v376927. [ 1411.744816] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Creating folder: Instances. Parent ref: group-v377002. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1411.745053] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1c2b7b1-69f6-40d7-96ed-5df4d402ee89 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.755715] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Created folder: Instances in parent group-v377002. [ 1411.756090] env[63241]: DEBUG oslo.service.loopingcall [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1411.756284] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1411.756930] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c8df2ca-0891-426b-b821-da3e31eb3c2c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.781901] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1819996, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485647} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.783118] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 41182989-2537-42f0-8c37-792b8b2c5206/41182989-2537-42f0-8c37-792b8b2c5206.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1411.783343] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1411.783567] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1411.783567] env[63241]: value = "task-1820000" [ 1411.783567] env[63241]: _type = "Task" [ 1411.783567] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.783743] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4612bd8a-df6f-4f59-b2c2-51969b361b03 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.795507] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820000, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.796949] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1411.796949] env[63241]: value = "task-1820001" [ 1411.796949] env[63241]: _type = "Task" [ 1411.796949] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.809842] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820001, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.100693] env[63241]: DEBUG oslo_vmware.api [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1819997, 'name': PowerOffVM_Task, 'duration_secs': 0.273687} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.101032] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1412.101225] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1412.101488] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93a58f53-e6bd-4623-a070-cfd839634f00 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.113363] env[63241]: DEBUG nova.compute.manager [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1412.140193] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1412.140460] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1412.140617] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.140797] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1412.141094] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.141094] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1412.141304] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1412.141518] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1412.141741] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1412.141919] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1412.142129] env[63241]: DEBUG nova.virt.hardware [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1412.143024] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf7316d-ed49-432e-bdd4-52054121f6c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.151126] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db06a16-0ddb-40e3-b39c-6b3f8a8222a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.236713] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.149s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.237257] env[63241]: DEBUG nova.compute.manager [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1412.240086] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.810s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.241950] env[63241]: INFO nova.compute.claims [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1412.249656] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1412.249894] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1412.250051] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Deleting the datastore file [datastore1] fe8eaeee-56b2-4974-a448-8f95848b3b3a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1412.250307] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-294007fa-fba6-4c27-8f21-8414674b7e27 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.257045] env[63241]: DEBUG oslo_vmware.api [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for the task: (returnval){ [ 1412.257045] env[63241]: value = "task-1820003" [ 1412.257045] env[63241]: _type = "Task" [ 1412.257045] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.266019] env[63241]: DEBUG oslo_vmware.api [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1820003, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.295944] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820000, 'name': CreateVM_Task, 'duration_secs': 0.381355} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.296134] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1412.296883] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.297080] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.297447] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1412.297770] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9821898c-1b5a-47ba-9959-b60bad248e17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.305995] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1412.305995] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524907ed-8768-589d-0801-cad782a018e3" [ 1412.305995] env[63241]: _type = "Task" [ 1412.305995] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.309086] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07614} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.312371] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1412.315224] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3500e668-f744-44d7-8170-e5d60dcb2e46 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.323891] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524907ed-8768-589d-0801-cad782a018e3, 'name': SearchDatastore_Task, 'duration_secs': 0.009894} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.332894] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.333176] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1412.333433] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.333586] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.333777] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1412.342768] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] 
Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 41182989-2537-42f0-8c37-792b8b2c5206/41182989-2537-42f0-8c37-792b8b2c5206.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1412.345170] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63b4b50d-b45f-45a1-89d6-50c93ef933a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.346978] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-262701c0-3fc0-4860-90f1-0e76bf33ce22 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.367808] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1412.367808] env[63241]: value = "task-1820004" [ 1412.367808] env[63241]: _type = "Task" [ 1412.367808] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.369065] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1412.369292] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1412.372954] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11844a58-1f80-4b44-b580-7be98ee6b8a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.381423] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820004, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.384740] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1412.384740] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52fb2700-fcd7-bc98-2e68-494170d7459f" [ 1412.384740] env[63241]: _type = "Task" [ 1412.384740] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.395742] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fb2700-fcd7-bc98-2e68-494170d7459f, 'name': SearchDatastore_Task, 'duration_secs': 0.009408} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.397032] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8da9044c-327c-4c73-a339-c1b4bc5657b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.402926] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1412.402926] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526cd2f6-45eb-4039-38e2-9ac8773c6ca2" [ 1412.402926] env[63241]: _type = "Task" [ 1412.402926] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.411215] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526cd2f6-45eb-4039-38e2-9ac8773c6ca2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.487016] env[63241]: DEBUG nova.network.neutron [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Updated VIF entry in instance network info cache for port 79c31016-2c0a-49c7-bfd0-bbed6734219c. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1412.487394] env[63241]: DEBUG nova.network.neutron [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Updating instance_info_cache with network_info: [{"id": "79c31016-2c0a-49c7-bfd0-bbed6734219c", "address": "fa:16:3e:78:b7:4f", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c31016-2c", "ovs_interfaceid": "79c31016-2c0a-49c7-bfd0-bbed6734219c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.747136] env[63241]: DEBUG nova.compute.utils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1412.751367] env[63241]: 
DEBUG nova.compute.manager [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1412.751539] env[63241]: DEBUG nova.network.neutron [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1412.771374] env[63241]: DEBUG oslo_vmware.api [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Task: {'id': task-1820003, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146699} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.771629] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1412.771817] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1412.771998] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1412.772519] env[63241]: INFO nova.compute.manager [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1412.772794] env[63241]: DEBUG oslo.service.loopingcall [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.772985] env[63241]: DEBUG nova.compute.manager [-] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1412.773096] env[63241]: DEBUG nova.network.neutron [-] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1412.785745] env[63241]: DEBUG nova.network.neutron [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Successfully updated port: 047a4f6e-0c89-41ce-ab72-9a6d521031c4 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1412.792108] env[63241]: DEBUG nova.policy [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f5b40d7cf04b3d8702df00367b22a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38c709b68d2a40049d6d4795267987d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1412.880250] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820004, 'name': ReconfigVM_Task, 'duration_secs': 0.347411} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.880584] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 41182989-2537-42f0-8c37-792b8b2c5206/41182989-2537-42f0-8c37-792b8b2c5206.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1412.881950] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72aba167-3b58-4d4d-a0cf-90a4ad53088a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.887929] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1412.887929] env[63241]: value = "task-1820006" [ 1412.887929] env[63241]: _type = "Task" [ 1412.887929] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.897926] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820006, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.915849] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526cd2f6-45eb-4039-38e2-9ac8773c6ca2, 'name': SearchDatastore_Task, 'duration_secs': 0.009379} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.915849] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.915849] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0115b03b-c828-4e8b-a4d2-c98f8ca69c66/0115b03b-c828-4e8b-a4d2-c98f8ca69c66.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1412.916890] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4012a2d5-403f-4cec-bde1-0cbe0b4eef73 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.923782] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1412.923782] env[63241]: value = "task-1820007" [ 1412.923782] env[63241]: _type = "Task" [ 1412.923782] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.934151] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820007, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.990688] env[63241]: DEBUG oslo_concurrency.lockutils [req-7cd77398-f336-4c35-b7d0-89194a88a702 req-33b61574-9afc-4989-8b2f-2b6e30620541 service nova] Releasing lock "refresh_cache-0115b03b-c828-4e8b-a4d2-c98f8ca69c66" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.082982] env[63241]: DEBUG nova.network.neutron [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Successfully created port: ab769350-6899-4d15-94f5-ede018f0f344 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.252647] env[63241]: DEBUG nova.compute.manager [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1413.291869] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "refresh_cache-d60c3a22-19fb-4826-be88-d0307810a079" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.292095] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquired lock "refresh_cache-d60c3a22-19fb-4826-be88-d0307810a079" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.292291] env[63241]: DEBUG nova.network.neutron [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1413.398713] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820006, 'name': Rename_Task, 'duration_secs': 0.145793} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.401037] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1413.403035] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2ad7f0f-7d1b-403f-ac45-bc221cd56344 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.410156] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1413.410156] env[63241]: value = "task-1820008" [ 1413.410156] env[63241]: _type = "Task" [ 1413.410156] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.421236] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820008, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.433591] env[63241]: DEBUG nova.compute.manager [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Received event network-vif-plugged-047a4f6e-0c89-41ce-ab72-9a6d521031c4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1413.433819] env[63241]: DEBUG oslo_concurrency.lockutils [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] Acquiring lock "d60c3a22-19fb-4826-be88-d0307810a079-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.434063] env[63241]: DEBUG oslo_concurrency.lockutils [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] Lock "d60c3a22-19fb-4826-be88-d0307810a079-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.434265] env[63241]: DEBUG oslo_concurrency.lockutils [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] Lock "d60c3a22-19fb-4826-be88-d0307810a079-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.434451] env[63241]: DEBUG nova.compute.manager [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] No waiting events found dispatching network-vif-plugged-047a4f6e-0c89-41ce-ab72-9a6d521031c4 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1413.434622] env[63241]: WARNING 
nova.compute.manager [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Received unexpected event network-vif-plugged-047a4f6e-0c89-41ce-ab72-9a6d521031c4 for instance with vm_state building and task_state spawning. [ 1413.434786] env[63241]: DEBUG nova.compute.manager [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Received event network-changed-047a4f6e-0c89-41ce-ab72-9a6d521031c4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1413.434994] env[63241]: DEBUG nova.compute.manager [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Refreshing instance network info cache due to event network-changed-047a4f6e-0c89-41ce-ab72-9a6d521031c4. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1413.435199] env[63241]: DEBUG oslo_concurrency.lockutils [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] Acquiring lock "refresh_cache-d60c3a22-19fb-4826-be88-d0307810a079" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.439274] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482235} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.439480] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0115b03b-c828-4e8b-a4d2-c98f8ca69c66/0115b03b-c828-4e8b-a4d2-c98f8ca69c66.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1413.439744] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1413.439986] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62e4e91b-f5eb-4310-bfe1-a0c1dd012187 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.449648] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1413.449648] env[63241]: value = "task-1820009" [ 1413.449648] env[63241]: _type = "Task" [ 1413.449648] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.463850] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820009, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.591830] env[63241]: DEBUG nova.network.neutron [-] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.829147] env[63241]: DEBUG nova.network.neutron [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1413.846588] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00b6250-f42f-4b12-ab96-fa2fefb4d841 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.858827] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7859d77-8c20-4ea6-a3b5-6f6194193548 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.901146] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bc0783-c2e5-49f8-ad14-352d6d1e792c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.909643] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972c24cf-8cd1-4013-9c4e-c4582547e958 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.923133] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820008, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.933105] env[63241]: DEBUG nova.compute.provider_tree [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.963368] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820009, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067013} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.963792] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1413.968029] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3903f5-e944-4e7a-82da-8bb140ac4893 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.994637] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 0115b03b-c828-4e8b-a4d2-c98f8ca69c66/0115b03b-c828-4e8b-a4d2-c98f8ca69c66.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1413.997775] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ac5de25-7329-4291-a159-7b236dd61da6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.018762] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1414.018762] env[63241]: value = "task-1820010" [ 1414.018762] env[63241]: _type = "Task" [ 1414.018762] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.027586] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820010, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.097097] env[63241]: INFO nova.compute.manager [-] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Took 1.32 seconds to deallocate network for instance. 
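The recurring "Waiting for the task: ... to complete" / "progress is N%." / "completed successfully" triples above (DeleteDatastoreFile_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) come from oslo.vmware's task-polling loop around vCenter tasks. A minimal sketch of that polling pattern follows; the get_task_info callable and the fake task-info objects are assumed stand-ins for the real session/property-collector read, not the oslo.vmware API itself.

import time

def poll_until_done(get_task_info, task, interval=0.5, timeout=300.0):
    # get_task_info is an assumed helper standing in for the real
    # session read; it should return an object with .state
    # ('running'/'success'/'error'), .progress and .error attributes.
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task)
        if info.state == 'success':          # "... completed successfully."
            return info
        if info.state == 'error':            # surfaced as a fault by the real driver
            raise RuntimeError(info.error)
        if time.monotonic() > deadline:
            raise TimeoutError('task %s did not complete in %ss' % (task, timeout))
        # corresponds to "Task: {...} progress is N%." lines in the log
        print('Task %s progress is %s%%.' % (task, info.progress or 0))
        time.sleep(interval)

# Tiny self-contained demo mimicking the 0% -> 94% -> success sequence
# logged for task-1820008 (PowerOnVM_Task); the states are fabricated.
class _FakeInfo(object):
    def __init__(self, state, progress=0, error=None):
        self.state, self.progress, self.error = state, progress, error

_states = iter([_FakeInfo('running', 0), _FakeInfo('running', 94),
                _FakeInfo('success', 100)])
print(poll_until_done(lambda task: next(_states), 'task-1820008',
                      interval=0.01).state)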
[ 1414.136231] env[63241]: DEBUG nova.network.neutron [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Updating instance_info_cache with network_info: [{"id": "047a4f6e-0c89-41ce-ab72-9a6d521031c4", "address": "fa:16:3e:23:c5:c6", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap047a4f6e-0c", "ovs_interfaceid": "047a4f6e-0c89-41ce-ab72-9a6d521031c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.277350] env[63241]: DEBUG nova.compute.manager [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1414.303937] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1414.304212] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1414.304552] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.304932] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1414.305225] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.306038] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1414.306038] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1414.306171] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1414.306400] env[63241]: DEBUG nova.virt.hardware [None 
req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1414.306681] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1414.306871] env[63241]: DEBUG nova.virt.hardware [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1414.307790] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afffd56-0cc1-429e-8c23-3af43408a753 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.316281] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9176225c-048f-4da0-865b-8ea3b0074f1a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.424147] env[63241]: DEBUG oslo_vmware.api [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820008, 'name': PowerOnVM_Task, 'duration_secs': 0.598644} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.424147] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1414.424258] env[63241]: INFO nova.compute.manager [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Took 7.78 seconds to spawn the instance on the hypervisor. 
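The "Getting desirable topologies ... Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies" block above is Nova fitting a sockets/cores/threads layout to the flavor: with no flavor or image limits the maxima default to 65536, and the only layout that uses exactly 1 vCPU is 1x1x1. A rough sketch of that enumeration follows, under the assumption that a candidate topology is any triple whose product equals the vCPU count and respects the per-dimension maxima; Nova's real ordering and preference handling is more involved.

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Rough equivalent of the "Build topologies ... Got N possible
    # topologies" step: every (sockets, cores, threads) triple whose
    # product equals the vCPU count is a candidate.
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(max_cores, vcpus // sockets) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For the m1.nano flavor above (vcpus=1) this yields a single candidate,
# matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(possible_topologies(1))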
[ 1414.424373] env[63241]: DEBUG nova.compute.manager [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1414.425218] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fc40f0-d4ac-4c1e-90d5-decd34a79c2f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.438541] env[63241]: DEBUG nova.scheduler.client.report [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1414.529453] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.569654] env[63241]: DEBUG nova.compute.manager [req-11d73188-2d6d-4d12-a663-86a81a714374 req-867e2daa-ec2e-4e9d-a84d-17a999526a43 service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Received event network-vif-plugged-ab769350-6899-4d15-94f5-ede018f0f344 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1414.569894] env[63241]: DEBUG oslo_concurrency.lockutils [req-11d73188-2d6d-4d12-a663-86a81a714374 req-867e2daa-ec2e-4e9d-a84d-17a999526a43 service nova] Acquiring lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.570122] env[63241]: DEBUG oslo_concurrency.lockutils [req-11d73188-2d6d-4d12-a663-86a81a714374 req-867e2daa-ec2e-4e9d-a84d-17a999526a43 service nova] Lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.570297] env[63241]: DEBUG oslo_concurrency.lockutils [req-11d73188-2d6d-4d12-a663-86a81a714374 req-867e2daa-ec2e-4e9d-a84d-17a999526a43 service nova] Lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.570465] env[63241]: DEBUG nova.compute.manager [req-11d73188-2d6d-4d12-a663-86a81a714374 req-867e2daa-ec2e-4e9d-a84d-17a999526a43 service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] No waiting events found 
dispatching network-vif-plugged-ab769350-6899-4d15-94f5-ede018f0f344 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1414.570637] env[63241]: WARNING nova.compute.manager [req-11d73188-2d6d-4d12-a663-86a81a714374 req-867e2daa-ec2e-4e9d-a84d-17a999526a43 service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Received unexpected event network-vif-plugged-ab769350-6899-4d15-94f5-ede018f0f344 for instance with vm_state building and task_state spawning. [ 1414.606929] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.642966] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Releasing lock "refresh_cache-d60c3a22-19fb-4826-be88-d0307810a079" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.643340] env[63241]: DEBUG nova.compute.manager [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Instance network_info: |[{"id": "047a4f6e-0c89-41ce-ab72-9a6d521031c4", "address": "fa:16:3e:23:c5:c6", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap047a4f6e-0c", "ovs_interfaceid": "047a4f6e-0c89-41ce-ab72-9a6d521031c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1414.643635] env[63241]: DEBUG oslo_concurrency.lockutils [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] Acquired lock "refresh_cache-d60c3a22-19fb-4826-be88-d0307810a079" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.643819] env[63241]: DEBUG nova.network.neutron [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Refreshing network info cache for port 047a4f6e-0c89-41ce-ab72-9a6d521031c4 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1414.645028] env[63241]: DEBUG nova.virt.vmwareapi.vmops 
[None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:c5:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '047a4f6e-0c89-41ce-ab72-9a6d521031c4', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1414.654549] env[63241]: DEBUG oslo.service.loopingcall [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.655066] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1414.655307] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-698ca712-995b-405c-912b-f9f06c9a59bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.671604] env[63241]: DEBUG nova.network.neutron [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Successfully updated port: ab769350-6899-4d15-94f5-ede018f0f344 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1414.677172] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1414.677172] env[63241]: value = "task-1820011" [ 1414.677172] env[63241]: _type = "Task" [ 1414.677172] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.685880] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820011, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.944601] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.944739] env[63241]: DEBUG nova.compute.manager [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1414.948096] env[63241]: INFO nova.compute.manager [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Took 41.13 seconds to build instance. 
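The "compute_resources" lock lines scattered through this stretch (instance_claim acquired after waiting ~27s, held ~2s, update_usage queueing behind it) show every claim on the node being serialized behind a single oslo.concurrency lock, which is why the waits accumulate while other builds hold it. A toy sketch of that shape follows, using lockutils.synchronized around invented bookkeeping; the totals are taken from the inventory logged above, and everything else (class, fields, arithmetic) is an assumption for illustration, not Nova's ResourceTracker.

from oslo_concurrency import lockutils

class MiniResourceTracker(object):
    # Only the locking shape matches the log: the 'compute_resources'
    # lock is held for the whole claim, so concurrent claims queue up.
    def __init__(self, total_vcpus, total_mem_mb):
        self.free_vcpus = total_vcpus
        self.free_mem_mb = total_mem_mb

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, vcpus, mem_mb):
        # All claims serialize here, producing the "waited 27.401s"
        # style messages when another build holds the lock.
        if vcpus > self.free_vcpus or mem_mb > self.free_mem_mb:
            raise RuntimeError('insufficient resources for claim')
        self.free_vcpus -= vcpus
        self.free_mem_mb -= mem_mb
        return {'vcpus': vcpus, 'memory_mb': mem_mb}

# Totals from the logged provider inventory (VCPU total 48,
# MEMORY_MB total 196590); the claim is m1.nano-sized (1 vCPU, 192 MB).
tracker = MiniResourceTracker(total_vcpus=48, total_mem_mb=196590)
print(tracker.instance_claim(vcpus=1, mem_mb=192))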
[ 1414.949240] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.401s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.951138] env[63241]: INFO nova.compute.claims [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.031958] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.177859] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "refresh_cache-ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.178078] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "refresh_cache-ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.178204] env[63241]: DEBUG nova.network.neutron [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1415.189624] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820011, 'name': CreateVM_Task, 'duration_secs': 0.473062} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.189803] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1415.190505] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.190672] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.190986] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1415.191282] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d5d61f1-ab33-4f33-8c8d-e0d815b4ad62 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.196306] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1415.196306] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5277fc0b-c74d-623f-294a-2b094b9ca447" [ 1415.196306] env[63241]: _type = "Task" [ 1415.196306] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.207725] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5277fc0b-c74d-623f-294a-2b094b9ca447, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.391246] env[63241]: DEBUG nova.network.neutron [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Updated VIF entry in instance network info cache for port 047a4f6e-0c89-41ce-ab72-9a6d521031c4. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1415.391767] env[63241]: DEBUG nova.network.neutron [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Updating instance_info_cache with network_info: [{"id": "047a4f6e-0c89-41ce-ab72-9a6d521031c4", "address": "fa:16:3e:23:c5:c6", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap047a4f6e-0c", "ovs_interfaceid": "047a4f6e-0c89-41ce-ab72-9a6d521031c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.449579] env[63241]: DEBUG nova.compute.utils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1415.451057] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c82460db-7082-4aee-a775-6e56918fd987 tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "41182989-2537-42f0-8c37-792b8b2c5206" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.522s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.451191] env[63241]: DEBUG nova.compute.manager [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1415.451350] env[63241]: DEBUG nova.network.neutron [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1415.506673] env[63241]: DEBUG nova.policy [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ecfeb57e9ebb4929867e7b4de8716c6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6a96e3f7eb649ffb15fd6a19a98fdd4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1415.529399] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820010, 'name': ReconfigVM_Task, 'duration_secs': 1.157207} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.529676] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 0115b03b-c828-4e8b-a4d2-c98f8ca69c66/0115b03b-c828-4e8b-a4d2-c98f8ca69c66.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1415.531468] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-173670ef-26b1-4b41-935d-2ff5dede1055 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.536835] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1415.536835] env[63241]: value = "task-1820013" [ 1415.536835] env[63241]: _type = "Task" [ 1415.536835] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.544954] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820013, 'name': Rename_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.708343] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5277fc0b-c74d-623f-294a-2b094b9ca447, 'name': SearchDatastore_Task, 'duration_secs': 0.009709} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.708661] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.708899] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1415.709466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.709466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.709466] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.709682] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ef56348-8b22-4c2a-843c-52b6589ca6a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.718655] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1415.718807] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1415.719541] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70e8aac4-1a14-4581-af7f-7f72a87cc390 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.724636] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1415.724636] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52df47f5-085b-1a78-19f5-45275776d80f" [ 1415.724636] env[63241]: _type = "Task" [ 1415.724636] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.725518] env[63241]: DEBUG nova.network.neutron [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1415.734679] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52df47f5-085b-1a78-19f5-45275776d80f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.763229] env[63241]: DEBUG nova.network.neutron [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Successfully created port: 34cd3144-d5ff-455a-a20e-09fe6e3896ba {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1415.895182] env[63241]: DEBUG oslo_concurrency.lockutils [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] Releasing lock "refresh_cache-d60c3a22-19fb-4826-be88-d0307810a079" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.895182] env[63241]: DEBUG nova.compute.manager [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Received event network-vif-deleted-af23e4e4-1f35-4054-9d3e-03ba04cc1223 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1415.895347] env[63241]: INFO nova.compute.manager [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Neutron deleted interface af23e4e4-1f35-4054-9d3e-03ba04cc1223; detaching it from the instance and deleting it from the info cache [ 1415.895462] env[63241]: DEBUG nova.network.neutron [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.901491] 
env[63241]: DEBUG oslo_concurrency.lockutils [None req-a360f78b-0531-4aea-91bb-21bb62b2224f tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquiring lock "interface-41182989-2537-42f0-8c37-792b8b2c5206-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.901716] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a360f78b-0531-4aea-91bb-21bb62b2224f tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "interface-41182989-2537-42f0-8c37-792b8b2c5206-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.902028] env[63241]: DEBUG nova.objects.instance [None req-a360f78b-0531-4aea-91bb-21bb62b2224f tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lazy-loading 'flavor' on Instance uuid 41182989-2537-42f0-8c37-792b8b2c5206 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1415.954557] env[63241]: DEBUG nova.compute.manager [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1415.957234] env[63241]: DEBUG nova.compute.manager [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1416.047424] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820013, 'name': Rename_Task, 'duration_secs': 0.139308} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.047708] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1416.047948] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b954dcc4-5e7c-416d-a059-6d638708e3f8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.055271] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1416.055271] env[63241]: value = "task-1820014" [ 1416.055271] env[63241]: _type = "Task" [ 1416.055271] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.065164] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.094861] env[63241]: DEBUG nova.network.neutron [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Updating instance_info_cache with network_info: [{"id": "ab769350-6899-4d15-94f5-ede018f0f344", "address": "fa:16:3e:f7:8c:71", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab769350-68", "ovs_interfaceid": "ab769350-6899-4d15-94f5-ede018f0f344", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.237201] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52df47f5-085b-1a78-19f5-45275776d80f, 'name': SearchDatastore_Task, 'duration_secs': 0.009063} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.240505] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d67ff203-d25d-493d-950e-e15e498717c6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.246113] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1416.246113] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d3b86a-1951-b3ff-da51-eb7dbff191b7" [ 1416.246113] env[63241]: _type = "Task" [ 1416.246113] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.252774] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d3b86a-1951-b3ff-da51-eb7dbff191b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.398412] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56e202f3-56cb-4f68-a82f-d4ec2f6c5891 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.405392] env[63241]: DEBUG nova.objects.instance [None req-a360f78b-0531-4aea-91bb-21bb62b2224f tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lazy-loading 'pci_requests' on Instance uuid 41182989-2537-42f0-8c37-792b8b2c5206 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1416.409288] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f176d2-5b5a-44a8-89da-b9ad80fde8d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.414639] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6faf85b9-f963-4015-a256-774e500c2cd2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.430534] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730369a6-255c-49ed-b06f-8fea1510b178 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.448707] env[63241]: DEBUG nova.compute.manager [req-2cd83bab-d657-43ca-90bb-65ffb6769380 req-e52976c3-3237-49e3-b11e-e43131d343ef service nova] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Detach interface failed, port_id=af23e4e4-1f35-4054-9d3e-03ba04cc1223, reason: Instance fe8eaeee-56b2-4974-a448-8f95848b3b3a could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1416.482995] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc22d288-4ed5-47ba-8de9-0f47998f0784 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.491279] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61433c6-41f0-4151-b458-0c000bb264da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.496348] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.507536] env[63241]: DEBUG nova.compute.provider_tree [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.567302] env[63241]: DEBUG oslo_vmware.api [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820014, 'name': PowerOnVM_Task, 'duration_secs': 0.454105} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.567612] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1416.567862] env[63241]: INFO nova.compute.manager [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Took 7.11 seconds to spawn the instance on the hypervisor. 
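The lock bookkeeping that threads through this section ("Acquiring lock ... by ...", "acquired ... :: waited", ""released" ... :: held" for compute_resources, the per-instance refresh_cache-<uuid> names, and the datastore image-cache path) comes from oslo.concurrency's named-lock helpers, which record wait and hold times at DEBUG. A minimal sketch of that pattern follows; the lock names and function bodies are illustrative stand-ins, not Nova's resource-tracker code.

    from oslo_concurrency import lockutils

    # Decorator form: callers contending for the same lock name serialize here,
    # and the wrapper emits the "Acquiring lock" / "acquired :: waited" /
    # ""released" :: held" DEBUG messages seen above.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid, flavor):
        pass  # e.g. update per-node usage, then report it to placement

    # Context-manager form, for an ad-hoc critical section such as a
    # per-instance network info cache refresh.
    def refresh_instance_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild this instance's cached network info here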
[ 1416.568267] env[63241]: DEBUG nova.compute.manager [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1416.568879] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d1383e-47b0-41e2-b06a-6acc7774a071 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.597708] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "refresh_cache-ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.598051] env[63241]: DEBUG nova.compute.manager [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Instance network_info: |[{"id": "ab769350-6899-4d15-94f5-ede018f0f344", "address": "fa:16:3e:f7:8c:71", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab769350-68", "ovs_interfaceid": "ab769350-6899-4d15-94f5-ede018f0f344", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1416.598592] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:8c:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '329d0e4b-4190-484a-8560-9356dc31beca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab769350-6899-4d15-94f5-ede018f0f344', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1416.606696] env[63241]: DEBUG oslo.service.loopingcall [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1416.606954] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1416.607214] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f57f66b-f92d-4701-84aa-c9b2081455f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.627873] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1416.627873] env[63241]: value = "task-1820015" [ 1416.627873] env[63241]: _type = "Task" [ 1416.627873] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.635677] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820015, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.756206] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d3b86a-1951-b3ff-da51-eb7dbff191b7, 'name': SearchDatastore_Task, 'duration_secs': 0.008748} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.756515] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.756789] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] d60c3a22-19fb-4826-be88-d0307810a079/d60c3a22-19fb-4826-be88-d0307810a079.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1416.757061] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81c7ee41-39bc-4ff2-a35b-dae45b77a3b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.763442] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1416.763442] env[63241]: value = "task-1820016" [ 1416.763442] env[63241]: _type = "Task" [ 1416.763442] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.773521] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820016, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.913098] env[63241]: DEBUG nova.objects.base [None req-a360f78b-0531-4aea-91bb-21bb62b2224f tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Object Instance<41182989-2537-42f0-8c37-792b8b2c5206> lazy-loaded attributes: flavor,pci_requests {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1416.913359] env[63241]: DEBUG nova.network.neutron [None req-a360f78b-0531-4aea-91bb-21bb62b2224f tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1416.984050] env[63241]: DEBUG nova.compute.manager [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1417.010799] env[63241]: DEBUG nova.scheduler.client.report [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1417.016630] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1417.016899] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 
tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1417.017074] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1417.017266] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1417.017413] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1417.017597] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1417.017812] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1417.017972] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1417.018151] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1417.018306] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1417.018487] env[63241]: DEBUG nova.virt.hardware [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1417.021149] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becc1b4f-e8a6-4a85-8cf6-27a1ea99a2f3 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.029428] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-909d333b-7e49-41cc-b0bd-c9f118b1445a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.035180] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a360f78b-0531-4aea-91bb-21bb62b2224f tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "interface-41182989-2537-42f0-8c37-792b8b2c5206-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.133s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.066659] env[63241]: DEBUG nova.compute.manager [req-b231626e-6225-4656-a154-1c58c32a32aa req-55ed243f-1459-4b24-8437-deb5b6e98ad4 service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Received event network-changed-ab769350-6899-4d15-94f5-ede018f0f344 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1417.066910] env[63241]: DEBUG nova.compute.manager [req-b231626e-6225-4656-a154-1c58c32a32aa req-55ed243f-1459-4b24-8437-deb5b6e98ad4 service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Refreshing instance network info cache due to event network-changed-ab769350-6899-4d15-94f5-ede018f0f344. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1417.067161] env[63241]: DEBUG oslo_concurrency.lockutils [req-b231626e-6225-4656-a154-1c58c32a32aa req-55ed243f-1459-4b24-8437-deb5b6e98ad4 service nova] Acquiring lock "refresh_cache-ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.067312] env[63241]: DEBUG oslo_concurrency.lockutils [req-b231626e-6225-4656-a154-1c58c32a32aa req-55ed243f-1459-4b24-8437-deb5b6e98ad4 service nova] Acquired lock "refresh_cache-ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.067483] env[63241]: DEBUG nova.network.neutron [req-b231626e-6225-4656-a154-1c58c32a32aa req-55ed243f-1459-4b24-8437-deb5b6e98ad4 service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Refreshing network info cache for port ab769350-6899-4d15-94f5-ede018f0f344 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1417.088050] env[63241]: INFO nova.compute.manager [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Took 42.76 seconds to build instance. [ 1417.139034] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820015, 'name': CreateVM_Task, 'duration_secs': 0.333603} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.139151] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1417.139924] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.140073] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.140400] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1417.140675] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39a3161b-79f5-4a39-bd0b-40e792f0c3f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.146106] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1417.146106] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52eb262b-a2df-f3fc-6107-ebfec91724a0" [ 1417.146106] env[63241]: _type = "Task" [ 1417.146106] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.154666] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52eb262b-a2df-f3fc-6107-ebfec91724a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.273022] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820016, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.295587] env[63241]: DEBUG nova.network.neutron [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Successfully updated port: 34cd3144-d5ff-455a-a20e-09fe6e3896ba {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1417.524470] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.525598] env[63241]: DEBUG nova.compute.manager [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1417.528211] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.286s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.529600] env[63241]: INFO nova.compute.claims [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1417.590815] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b94b9ab5-cda2-4ac9-ba92-8657e2db4521 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.340s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.656507] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52eb262b-a2df-f3fc-6107-ebfec91724a0, 'name': SearchDatastore_Task, 'duration_secs': 0.045203} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.656861] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.657108] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1417.657343] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.657490] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.657668] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1417.657963] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0af621a-c60e-4292-8124-dc01a4aab232 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.670368] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1417.670546] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1417.671352] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a32ccf5-4cc5-488c-9135-fff74df460a2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.678309] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1417.678309] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5202a856-be2c-7021-05d9-6b82adda5141" [ 1417.678309] env[63241]: _type = "Task" [ 1417.678309] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.685666] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5202a856-be2c-7021-05d9-6b82adda5141, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.772572] env[63241]: DEBUG nova.network.neutron [req-b231626e-6225-4656-a154-1c58c32a32aa req-55ed243f-1459-4b24-8437-deb5b6e98ad4 service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Updated VIF entry in instance network info cache for port ab769350-6899-4d15-94f5-ede018f0f344. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1417.772918] env[63241]: DEBUG nova.network.neutron [req-b231626e-6225-4656-a154-1c58c32a32aa req-55ed243f-1459-4b24-8437-deb5b6e98ad4 service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Updating instance_info_cache with network_info: [{"id": "ab769350-6899-4d15-94f5-ede018f0f344", "address": "fa:16:3e:f7:8c:71", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab769350-68", "ovs_interfaceid": "ab769350-6899-4d15-94f5-ede018f0f344", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.777305] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820016, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572557} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.777827] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] d60c3a22-19fb-4826-be88-d0307810a079/d60c3a22-19fb-4826-be88-d0307810a079.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1417.778187] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1417.778467] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-89ef8ebe-c79b-4920-949a-6f1274215f44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.786045] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1417.786045] env[63241]: value = "task-1820018" [ 1417.786045] env[63241]: _type = "Task" [ 1417.786045] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.794057] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820018, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.799866] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "refresh_cache-5060e745-08d0-429e-8780-bfdad7a29f30" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.799866] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquired lock "refresh_cache-5060e745-08d0-429e-8780-bfdad7a29f30" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.799866] env[63241]: DEBUG nova.network.neutron [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1418.034442] env[63241]: DEBUG nova.compute.utils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1418.037740] env[63241]: DEBUG nova.compute.manager [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1418.037883] env[63241]: DEBUG nova.network.neutron [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1418.097223] env[63241]: DEBUG nova.compute.manager [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1418.101232] env[63241]: DEBUG nova.policy [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '241be7981c2a4e8392411d55e0beb0b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aec32854f143447db1f2158591c69165', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1418.189778] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5202a856-be2c-7021-05d9-6b82adda5141, 'name': SearchDatastore_Task, 'duration_secs': 0.061976} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.190530] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ca96290-efae-4396-ace3-74c1b4d746e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.195199] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1418.195199] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525ec560-fa86-f9bf-dcc1-d80243faffd2" [ 1418.195199] env[63241]: _type = "Task" [ 1418.195199] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.203093] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525ec560-fa86-f9bf-dcc1-d80243faffd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.278482] env[63241]: DEBUG oslo_concurrency.lockutils [req-b231626e-6225-4656-a154-1c58c32a32aa req-55ed243f-1459-4b24-8437-deb5b6e98ad4 service nova] Releasing lock "refresh_cache-ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.296224] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820018, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.272784} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.296529] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1418.297354] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd557673-684a-4f9f-abd9-a7d73f354ad2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.320719] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] d60c3a22-19fb-4826-be88-d0307810a079/d60c3a22-19fb-4826-be88-d0307810a079.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1418.321357] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf3d5e17-f959-4686-807b-192c756689fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.343563] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1418.343563] env[63241]: value = "task-1820019" [ 1418.343563] env[63241]: _type = "Task" [ 1418.343563] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.352547] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820019, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.357200] env[63241]: DEBUG nova.network.neutron [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1418.437823] env[63241]: DEBUG nova.network.neutron [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Successfully created port: 30a83f5f-b061-401d-84a9-eb0eefd0af82 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1418.538285] env[63241]: DEBUG nova.compute.manager [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1418.582515] env[63241]: DEBUG nova.network.neutron [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Updating instance_info_cache with network_info: [{"id": "34cd3144-d5ff-455a-a20e-09fe6e3896ba", "address": "fa:16:3e:0a:44:9b", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34cd3144-d5", "ovs_interfaceid": "34cd3144-d5ff-455a-a20e-09fe6e3896ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.623371] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.707806] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525ec560-fa86-f9bf-dcc1-d80243faffd2, 'name': SearchDatastore_Task, 'duration_secs': 0.056801} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.708164] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.708470] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] ac35fa03-aeca-4e18-84ab-cb80bb4cabfd/ac35fa03-aeca-4e18-84ab-cb80bb4cabfd.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1418.708762] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfde80cd-c2d2-4c8e-89ae-6db4dfa1b826 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.716528] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1418.716528] env[63241]: value = "task-1820020" [ 1418.716528] env[63241]: _type = "Task" [ 1418.716528] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.728500] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820020, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.858518] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820019, 'name': ReconfigVM_Task, 'duration_secs': 0.3042} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.859183] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Reconfigured VM instance instance-00000018 to attach disk [datastore1] d60c3a22-19fb-4826-be88-d0307810a079/d60c3a22-19fb-4826-be88-d0307810a079.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1418.859895] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32a5ba7b-cc89-4d4a-8013-6bfbe4c03783 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.872016] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1418.872016] env[63241]: value = "task-1820021" [ 1418.872016] env[63241]: _type = "Task" [ 1418.872016] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.880285] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820021, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.050234] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquiring lock "41182989-2537-42f0-8c37-792b8b2c5206" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.050234] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "41182989-2537-42f0-8c37-792b8b2c5206" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.050234] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquiring lock "41182989-2537-42f0-8c37-792b8b2c5206-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.050430] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "41182989-2537-42f0-8c37-792b8b2c5206-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.051372] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "41182989-2537-42f0-8c37-792b8b2c5206-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.052588] env[63241]: INFO nova.compute.manager [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Terminating instance [ 1419.056871] env[63241]: DEBUG nova.compute.manager [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1419.057088] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1419.058014] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186da754-e71d-4384-89b6-57421f409337 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.068922] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1419.069070] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf9196b8-1d00-409e-b4e9-278be1959cd4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.077093] env[63241]: DEBUG oslo_vmware.api [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1419.077093] env[63241]: value = "task-1820023" [ 1419.077093] env[63241]: _type = "Task" [ 1419.077093] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.088299] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Releasing lock "refresh_cache-5060e745-08d0-429e-8780-bfdad7a29f30" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.089277] env[63241]: DEBUG nova.compute.manager [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Instance network_info: |[{"id": "34cd3144-d5ff-455a-a20e-09fe6e3896ba", "address": "fa:16:3e:0a:44:9b", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34cd3144-d5", "ovs_interfaceid": "34cd3144-d5ff-455a-a20e-09fe6e3896ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1419.089277] env[63241]: DEBUG oslo_vmware.api [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820023, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.090304] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:44:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34cd3144-d5ff-455a-a20e-09fe6e3896ba', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1419.098435] env[63241]: DEBUG oslo.service.loopingcall [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1419.099481] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33e08f3-0934-4b16-bfa4-3d530a181371 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.103411] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1419.105352] env[63241]: DEBUG nova.compute.manager [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Received event network-vif-plugged-34cd3144-d5ff-455a-a20e-09fe6e3896ba {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1419.105556] env[63241]: DEBUG oslo_concurrency.lockutils [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] Acquiring lock "5060e745-08d0-429e-8780-bfdad7a29f30-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.105764] env[63241]: DEBUG oslo_concurrency.lockutils [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] Lock "5060e745-08d0-429e-8780-bfdad7a29f30-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.105954] env[63241]: DEBUG oslo_concurrency.lockutils [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] Lock "5060e745-08d0-429e-8780-bfdad7a29f30-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.106100] env[63241]: DEBUG nova.compute.manager [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] No waiting events found dispatching network-vif-plugged-34cd3144-d5ff-455a-a20e-09fe6e3896ba {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1419.106269] env[63241]: WARNING nova.compute.manager [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Received unexpected event network-vif-plugged-34cd3144-d5ff-455a-a20e-09fe6e3896ba for instance with vm_state building and task_state spawning. [ 1419.106427] env[63241]: DEBUG nova.compute.manager [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Received event network-changed-34cd3144-d5ff-455a-a20e-09fe6e3896ba {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1419.106596] env[63241]: DEBUG nova.compute.manager [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Refreshing instance network info cache due to event network-changed-34cd3144-d5ff-455a-a20e-09fe6e3896ba. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1419.106879] env[63241]: DEBUG oslo_concurrency.lockutils [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] Acquiring lock "refresh_cache-5060e745-08d0-429e-8780-bfdad7a29f30" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.107075] env[63241]: DEBUG oslo_concurrency.lockutils [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] Acquired lock "refresh_cache-5060e745-08d0-429e-8780-bfdad7a29f30" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.107244] env[63241]: DEBUG nova.network.neutron [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Refreshing network info cache for port 34cd3144-d5ff-455a-a20e-09fe6e3896ba {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1419.108487] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2993337-af4f-4623-9e5d-4e2c735ed6af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.132933] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5884c11a-56ed-4b47-be42-ab9b7c42c06d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.138514] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1419.138514] env[63241]: value = "task-1820024" [ 1419.138514] env[63241]: _type = "Task" [ 1419.138514] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.179197] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8582c565-abdd-41fc-89f7-e7e29325ade2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.182045] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820024, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.191479] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a2ec35-397a-404f-addf-ccb5c19df312 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.208824] env[63241]: DEBUG nova.compute.provider_tree [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1419.227771] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820020, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.383315] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820021, 'name': Rename_Task, 'duration_secs': 0.155675} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.383630] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1419.383937] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9de83aa-b8b5-4f16-b0e4-34150dac1254 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.390802] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1419.390802] env[63241]: value = "task-1820025" [ 1419.390802] env[63241]: _type = "Task" [ 1419.390802] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.399468] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820025, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.441704] env[63241]: DEBUG nova.network.neutron [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Updated VIF entry in instance network info cache for port 34cd3144-d5ff-455a-a20e-09fe6e3896ba. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1419.442090] env[63241]: DEBUG nova.network.neutron [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Updating instance_info_cache with network_info: [{"id": "34cd3144-d5ff-455a-a20e-09fe6e3896ba", "address": "fa:16:3e:0a:44:9b", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34cd3144-d5", "ovs_interfaceid": "34cd3144-d5ff-455a-a20e-09fe6e3896ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.553619] env[63241]: DEBUG nova.compute.manager [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1419.583649] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1419.583899] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1419.584066] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1419.584250] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1419.584540] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1419.584699] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1419.584957] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1419.585138] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1419.585291] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1419.585520] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1419.585705] env[63241]: DEBUG nova.virt.hardware [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1419.586996] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83eb8cb1-4597-41bc-a4d6-211e2e8eaedd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.595613] env[63241]: DEBUG oslo_vmware.api [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820023, 'name': PowerOffVM_Task, 'duration_secs': 0.30691} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.597763] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1419.597941] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1419.598250] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5417e1c8-5b7a-4bda-ab92-e464539e60b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.600706] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84881e29-8d06-4e07-8384-a54a4ae0325d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.648696] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820024, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.715593] env[63241]: DEBUG nova.scheduler.client.report [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1419.731892] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820020, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662202} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.731892] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] ac35fa03-aeca-4e18-84ab-cb80bb4cabfd/ac35fa03-aeca-4e18-84ab-cb80bb4cabfd.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1419.731892] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1419.731892] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa841666-a1bd-4ac5-aae5-4a8bccc433c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.737524] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1419.737524] env[63241]: value = "task-1820027" [ 1419.737524] env[63241]: _type = "Task" [ 1419.737524] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.746463] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.901226] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820025, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.945300] env[63241]: DEBUG oslo_concurrency.lockutils [req-0a3324f3-2635-4ffc-8d54-c233e300911e req-118ef05c-f6a0-42e5-b2ac-4820df649892 service nova] Releasing lock "refresh_cache-5060e745-08d0-429e-8780-bfdad7a29f30" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.958733] env[63241]: DEBUG nova.network.neutron [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Successfully updated port: 30a83f5f-b061-401d-84a9-eb0eefd0af82 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1420.056335] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1420.056490] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1420.056770] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Deleting the datastore file [datastore1] 41182989-2537-42f0-8c37-792b8b2c5206 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1420.057380] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29e0cc44-6623-4b53-a725-25447fde3620 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.065030] env[63241]: DEBUG oslo_vmware.api [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for the task: (returnval){ [ 1420.065030] env[63241]: value = "task-1820028" [ 1420.065030] env[63241]: _type = "Task" [ 1420.065030] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.075455] env[63241]: DEBUG oslo_vmware.api [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820028, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.149642] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820024, 'name': CreateVM_Task, 'duration_secs': 0.538433} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.149831] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1420.150506] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.150673] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.150990] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1420.151253] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-657c1f49-8aed-46da-b486-54b5631300f6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.156094] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1420.156094] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52928359-bcd4-6d67-e0b7-5fc9ef80a0af" [ 1420.156094] env[63241]: _type = "Task" [ 1420.156094] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.163924] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52928359-bcd4-6d67-e0b7-5fc9ef80a0af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.225721] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.697s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.226503] env[63241]: DEBUG nova.compute.manager [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1420.229790] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.034s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.230072] env[63241]: DEBUG nova.objects.instance [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'resources' on Instance uuid 11b1888e-95ec-4166-9219-0c38f8817dd4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1420.248432] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.277777} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.249422] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1420.249499] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299814f2-07ed-4ed3-8508-48cc84a3ef1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.272409] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] ac35fa03-aeca-4e18-84ab-cb80bb4cabfd/ac35fa03-aeca-4e18-84ab-cb80bb4cabfd.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1420.273124] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d9b3c23-c108-43bd-a4a0-bfbba1e2dc1c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.294151] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1420.294151] env[63241]: value = "task-1820029" [ 1420.294151] env[63241]: _type = "Task" [ 1420.294151] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.305306] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820029, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.401129] env[63241]: DEBUG oslo_vmware.api [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820025, 'name': PowerOnVM_Task, 'duration_secs': 0.71148} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.401129] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1420.401129] env[63241]: INFO nova.compute.manager [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Took 8.29 seconds to spawn the instance on the hypervisor. 
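The records above repeat one pattern for every vCenter operation in this section (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task): the driver submits the call, wait_for_task logs "Waiting for the task", _poll_task reports "progress is N%" until the task reaches a terminal state, and the duration is recorded once it "completed successfully". A minimal sketch of that poll-sleep-check loop follows; poll_fn, TaskFailed and the default interval/timeout are inventions for the sketch, and this illustrates the pattern only, not the oslo.vmware implementation.

import time


class TaskFailed(Exception):
    pass


def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it reaches a terminal state.

    poll_fn() is assumed to return (state, progress), where state is one of
    'queued', 'running', 'success' or 'error'.
    """
    deadline = time.monotonic() + timeout
    while True:
        state, progress = poll_fn()
        print(f"progress is {progress}%")   # mirrors the DEBUG progress lines above
        if state == "success":
            return
        if state == "error":
            raise TaskFailed("task ended in an error state")
        if time.monotonic() > deadline:
            raise TaskFailed("timed out waiting for the task")
        time.sleep(interval)                # back off before the next poll


# Example: a task that needs three polls before it finishes.
_states = iter([("running", 0), ("running", 66), ("success", 100)])
wait_for_task(lambda: next(_states), interval=0.01)

Only the skeleton that produces the progress lines is modeled here; the interval and timeout values are illustrative defaults, not the service's configuration.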
[ 1420.401324] env[63241]: DEBUG nova.compute.manager [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1420.402073] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d9ef27-b415-40aa-864e-0433ff1912e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.461347] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquiring lock "refresh_cache-40217405-dcba-48cf-9d92-4122390d9fa8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.461503] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquired lock "refresh_cache-40217405-dcba-48cf-9d92-4122390d9fa8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.461657] env[63241]: DEBUG nova.network.neutron [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1420.575955] env[63241]: DEBUG oslo_vmware.api [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820028, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.666461] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52928359-bcd4-6d67-e0b7-5fc9ef80a0af, 'name': SearchDatastore_Task, 'duration_secs': 0.080222} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.666650] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.666769] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1420.667038] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.667167] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.667344] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1420.667609] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd22e64a-2343-4581-9290-1914cef5e339 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.678663] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1420.678834] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1420.679547] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42d1c2f6-be15-4b88-80b9-2722a9aa1aff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.684643] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1420.684643] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e95ffb-1e7a-98a1-475e-e05c33524716" [ 1420.684643] env[63241]: _type = "Task" [ 1420.684643] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.692153] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e95ffb-1e7a-98a1-475e-e05c33524716, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.733167] env[63241]: DEBUG nova.compute.utils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1420.737333] env[63241]: DEBUG nova.compute.manager [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1420.737511] env[63241]: DEBUG nova.network.neutron [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1420.804547] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820029, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.808254] env[63241]: DEBUG nova.policy [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbaf926b7294426ea90de8c089597ec0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '391c095b46d94ceb97fb48dcddf60d94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1420.924602] env[63241]: INFO nova.compute.manager [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Took 41.99 seconds to build instance. [ 1421.019992] env[63241]: DEBUG nova.network.neutron [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1421.075268] env[63241]: DEBUG oslo_vmware.api [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820028, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.148084] env[63241]: DEBUG nova.network.neutron [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Successfully created port: 0546f3d6-e4c2-463d-8a45-8f00ad6722d1 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1421.195458] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e95ffb-1e7a-98a1-475e-e05c33524716, 'name': SearchDatastore_Task, 'duration_secs': 0.021252} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.201622] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b51bafc2-4fa6-41ba-9a30-ac7807ecd4b4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.206922] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1421.206922] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521a7c27-5ddb-6ba4-f24c-223d51c7d63c" [ 1421.206922] env[63241]: _type = "Task" [ 1421.206922] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.217550] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521a7c27-5ddb-6ba4-f24c-223d51c7d63c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.237935] env[63241]: DEBUG nova.compute.manager [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1421.284689] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8570526b-7882-4418-a333-9e6e2cce652d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.289621] env[63241]: DEBUG nova.network.neutron [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Updating instance_info_cache with network_info: [{"id": "30a83f5f-b061-401d-84a9-eb0eefd0af82", "address": "fa:16:3e:d9:b4:b3", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a83f5f-b0", "ovs_interfaceid": "30a83f5f-b061-401d-84a9-eb0eefd0af82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.294542] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd2aa1c-dd84-4e02-af95-b76ede52975f tempest-ServersAdminNegativeTestJSON-15652427 tempest-ServersAdminNegativeTestJSON-15652427-project-admin] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Suspending the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1421.298369] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-07206560-1c75-4e8d-86ec-16a9ca2212e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.315684] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] 
Task: {'id': task-1820029, 'name': ReconfigVM_Task, 'duration_secs': 0.598933} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.317145] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Reconfigured VM instance instance-00000019 to attach disk [datastore1] ac35fa03-aeca-4e18-84ab-cb80bb4cabfd/ac35fa03-aeca-4e18-84ab-cb80bb4cabfd.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1421.317876] env[63241]: DEBUG oslo_vmware.api [None req-2cd2aa1c-dd84-4e02-af95-b76ede52975f tempest-ServersAdminNegativeTestJSON-15652427 tempest-ServersAdminNegativeTestJSON-15652427-project-admin] Waiting for the task: (returnval){ [ 1421.317876] env[63241]: value = "task-1820030" [ 1421.317876] env[63241]: _type = "Task" [ 1421.317876] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.318757] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f771939-8f46-4b8f-bad1-a24d5793acc0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.322749] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a174ffe2-a439-4aea-86a0-fa963f9c5074 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.335119] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9d3663-43a3-4982-a053-913c794a3065 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.338344] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1421.338344] env[63241]: value = "task-1820031" [ 1421.338344] env[63241]: _type = "Task" [ 1421.338344] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.339361] env[63241]: DEBUG oslo_vmware.api [None req-2cd2aa1c-dd84-4e02-af95-b76ede52975f tempest-ServersAdminNegativeTestJSON-15652427 tempest-ServersAdminNegativeTestJSON-15652427-project-admin] Task: {'id': task-1820030, 'name': SuspendVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.372776] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0ac727-56d2-4710-8aff-ee7500ac85d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.375510] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820031, 'name': Rename_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.381099] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dba0294-daa1-4d93-b4f0-b2ceadeae23c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.387313] env[63241]: DEBUG nova.compute.manager [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Received event network-vif-plugged-30a83f5f-b061-401d-84a9-eb0eefd0af82 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1421.387519] env[63241]: DEBUG oslo_concurrency.lockutils [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] Acquiring lock "40217405-dcba-48cf-9d92-4122390d9fa8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.387724] env[63241]: DEBUG oslo_concurrency.lockutils [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] Lock "40217405-dcba-48cf-9d92-4122390d9fa8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.387892] env[63241]: DEBUG oslo_concurrency.lockutils [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] Lock "40217405-dcba-48cf-9d92-4122390d9fa8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.388166] env[63241]: DEBUG nova.compute.manager [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] No waiting events found dispatching network-vif-plugged-30a83f5f-b061-401d-84a9-eb0eefd0af82 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1421.388294] env[63241]: WARNING nova.compute.manager [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Received unexpected event network-vif-plugged-30a83f5f-b061-401d-84a9-eb0eefd0af82 for instance with vm_state building and task_state spawning. [ 1421.388457] env[63241]: DEBUG nova.compute.manager [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Received event network-changed-30a83f5f-b061-401d-84a9-eb0eefd0af82 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1421.388679] env[63241]: DEBUG nova.compute.manager [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Refreshing instance network info cache due to event network-changed-30a83f5f-b061-401d-84a9-eb0eefd0af82. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1421.388772] env[63241]: DEBUG oslo_concurrency.lockutils [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] Acquiring lock "refresh_cache-40217405-dcba-48cf-9d92-4122390d9fa8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.399775] env[63241]: DEBUG nova.compute.provider_tree [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1421.427773] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7b28abb6-fa46-4153-ad25-b9a923c42f16 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "d60c3a22-19fb-4826-be88-d0307810a079" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.013s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.576007] env[63241]: DEBUG oslo_vmware.api [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Task: {'id': task-1820028, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.061564} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.576724] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1421.576924] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1421.577137] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1421.577305] env[63241]: INFO nova.compute.manager [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Took 2.52 seconds to destroy the instance on the hypervisor. [ 1421.577617] env[63241]: DEBUG oslo.service.loopingcall [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1421.577857] env[63241]: DEBUG nova.compute.manager [-] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1421.577941] env[63241]: DEBUG nova.network.neutron [-] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1421.717549] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521a7c27-5ddb-6ba4-f24c-223d51c7d63c, 'name': SearchDatastore_Task, 'duration_secs': 0.011079} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.717861] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.718139] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 5060e745-08d0-429e-8780-bfdad7a29f30/5060e745-08d0-429e-8780-bfdad7a29f30.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1421.718392] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d729c3a7-580a-45bb-8261-2ec351213d1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.727502] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1421.727502] env[63241]: value = "task-1820032" [ 1421.727502] env[63241]: _type = "Task" [ 1421.727502] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.735218] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820032, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.799474] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Releasing lock "refresh_cache-40217405-dcba-48cf-9d92-4122390d9fa8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.799866] env[63241]: DEBUG nova.compute.manager [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Instance network_info: |[{"id": "30a83f5f-b061-401d-84a9-eb0eefd0af82", "address": "fa:16:3e:d9:b4:b3", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a83f5f-b0", "ovs_interfaceid": "30a83f5f-b061-401d-84a9-eb0eefd0af82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1421.800313] env[63241]: DEBUG oslo_concurrency.lockutils [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] Acquired lock "refresh_cache-40217405-dcba-48cf-9d92-4122390d9fa8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.800562] env[63241]: DEBUG nova.network.neutron [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Refreshing network info cache for port 30a83f5f-b061-401d-84a9-eb0eefd0af82 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1421.802178] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:b4:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30a83f5f-b061-401d-84a9-eb0eefd0af82', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1421.810108] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 
tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Creating folder: Project (aec32854f143447db1f2158591c69165). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1421.810646] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0715d3dd-94e2-41fc-9ec8-eeff059bc3e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.823094] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Created folder: Project (aec32854f143447db1f2158591c69165) in parent group-v376927. [ 1421.823408] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Creating folder: Instances. Parent ref: group-v377009. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1421.823707] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb224e33-08c3-4430-885e-591a57e455f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.833576] env[63241]: DEBUG oslo_vmware.api [None req-2cd2aa1c-dd84-4e02-af95-b76ede52975f tempest-ServersAdminNegativeTestJSON-15652427 tempest-ServersAdminNegativeTestJSON-15652427-project-admin] Task: {'id': task-1820030, 'name': SuspendVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.836104] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Created folder: Instances in parent group-v377009. [ 1421.836337] env[63241]: DEBUG oslo.service.loopingcall [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1421.836525] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1421.836754] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fd48591-e783-484e-bd3d-979346e71432 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.863670] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820031, 'name': Rename_Task, 'duration_secs': 0.478543} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.868027] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1421.868027] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1421.868027] env[63241]: value = "task-1820035" [ 1421.868027] env[63241]: _type = "Task" [ 1421.868027] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.868027] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2a22329-8bd8-4fc6-b688-96b6e9c6f9fc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.875797] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820035, 'name': CreateVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.877297] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1421.877297] env[63241]: value = "task-1820036" [ 1421.877297] env[63241]: _type = "Task" [ 1421.877297] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.887501] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820036, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.902880] env[63241]: DEBUG nova.scheduler.client.report [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1421.929717] env[63241]: DEBUG nova.compute.manager [req-fe26168c-5e1e-4c0a-a9e5-64926f974339 req-473eba9f-47ab-4a84-9368-44644f01f9ad service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Received event network-vif-deleted-985a0245-a31d-4625-9329-9eccd849fb4d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1421.929940] env[63241]: INFO nova.compute.manager [req-fe26168c-5e1e-4c0a-a9e5-64926f974339 req-473eba9f-47ab-4a84-9368-44644f01f9ad service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Neutron deleted interface 985a0245-a31d-4625-9329-9eccd849fb4d; detaching it from the instance and deleting it from the info cache [ 1421.930130] env[63241]: DEBUG nova.network.neutron [req-fe26168c-5e1e-4c0a-a9e5-64926f974339 req-473eba9f-47ab-4a84-9368-44644f01f9ad service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.931899] env[63241]: DEBUG nova.compute.manager [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1422.238177] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820032, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.248828] env[63241]: DEBUG nova.compute.manager [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1422.274933] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1422.276016] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1422.276016] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1422.276016] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1422.276016] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1422.280016] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1422.280016] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1422.280016] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
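The nova.virt.hardware entries above and immediately below go from flavor and image limits of 0:0:0 (unset, so the 65536 maxima apply) to a single possible topology for the one-vCPU m1.nano flavor. A rough sketch of that enumeration, under the simplifying assumption that a candidate topology must account for exactly the flavor's vCPU count; this is not nova's exact implementation, it only shows why the following records report a single VirtCPUTopology for 1 vCPU.

    import collections

    VirtCPUTopology = collections.namedtuple("VirtCPUTopology",
                                             ["sockets", "cores", "threads"])

    def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Enumerate socket/core/thread splits that exactly cover vcpus."""
        found = []
        # Each factor can never exceed the vCPU count when the product must equal it,
        # so the loops are bounded by vcpus rather than the 65536 maxima.
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    # One vCPU with unset (65536) limits yields exactly one topology:
    print(possible_cpu_topologies(1, 65536, 65536, 65536))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]
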
[ 1422.280016] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1422.280016] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1422.280198] env[63241]: DEBUG nova.virt.hardware [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1422.280198] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adec2bed-ba44-4d26-a322-80216b1ad450 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.286974] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17115ef2-d5f1-4edf-9ee8-1d7e930d7a53 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.337262] env[63241]: DEBUG oslo_vmware.api [None req-2cd2aa1c-dd84-4e02-af95-b76ede52975f tempest-ServersAdminNegativeTestJSON-15652427 tempest-ServersAdminNegativeTestJSON-15652427-project-admin] Task: {'id': task-1820030, 'name': SuspendVM_Task, 'duration_secs': 0.748554} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.337761] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd2aa1c-dd84-4e02-af95-b76ede52975f tempest-ServersAdminNegativeTestJSON-15652427 tempest-ServersAdminNegativeTestJSON-15652427-project-admin] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Suspended the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1422.338093] env[63241]: DEBUG nova.compute.manager [None req-2cd2aa1c-dd84-4e02-af95-b76ede52975f tempest-ServersAdminNegativeTestJSON-15652427 tempest-ServersAdminNegativeTestJSON-15652427-project-admin] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1422.339253] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2c6fb2-6a06-4fbe-a09e-2d45dd69f63c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.381843] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820035, 'name': CreateVM_Task, 'duration_secs': 0.456376} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.384948] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1422.388696] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.388696] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.388696] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1422.388696] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fd77565-e968-4498-9e78-4f7fa3217541 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.392856] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820036, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.396928] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1422.396928] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c8ab23-7920-6856-9ba8-ff40beffc296" [ 1422.396928] env[63241]: _type = "Task" [ 1422.396928] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.405964] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c8ab23-7920-6856-9ba8-ff40beffc296, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.408729] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.179s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.410577] env[63241]: DEBUG nova.network.neutron [-] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.418744] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.948s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.418744] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.004s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.418744] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1422.418744] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.957s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.418744] env[63241]: DEBUG nova.objects.instance [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lazy-loading 'resources' on Instance uuid 99eccbef-0e76-4532-af2f-5d74e563e1d2 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1422.419304] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a597df-d637-4290-b1d3-f7e5c92a6584 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.434136] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c18e6332-aeda-4925-b3ed-29a71297f37e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.437680] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f560238-0252-4439-b83a-0a08703615b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.452479] env[63241]: INFO nova.scheduler.client.report [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 
tempest-DeleteServersTestJSON-1966896593-project-member] Deleted allocations for instance 11b1888e-95ec-4166-9219-0c38f8817dd4 [ 1422.471468] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ace6ded-13bb-4363-ae42-d517436268bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.477360] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5a0cf4-0b87-4b16-8c76-5c6fd4602405 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.492400] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.499931] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820baf65-5cb0-4a3d-a56a-4e93c67a85bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.537512] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180846MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1422.537692] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.553316] env[63241]: DEBUG nova.compute.manager [req-fe26168c-5e1e-4c0a-a9e5-64926f974339 req-473eba9f-47ab-4a84-9368-44644f01f9ad service nova] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Detach interface failed, port_id=985a0245-a31d-4625-9329-9eccd849fb4d, reason: Instance 41182989-2537-42f0-8c37-792b8b2c5206 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1422.720134] env[63241]: DEBUG nova.network.neutron [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Successfully updated port: 0546f3d6-e4c2-463d-8a45-8f00ad6722d1 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1422.738903] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820032, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518446} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.739736] env[63241]: DEBUG nova.network.neutron [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Updated VIF entry in instance network info cache for port 30a83f5f-b061-401d-84a9-eb0eefd0af82. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1422.740090] env[63241]: DEBUG nova.network.neutron [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Updating instance_info_cache with network_info: [{"id": "30a83f5f-b061-401d-84a9-eb0eefd0af82", "address": "fa:16:3e:d9:b4:b3", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30a83f5f-b0", "ovs_interfaceid": "30a83f5f-b061-401d-84a9-eb0eefd0af82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.742252] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 5060e745-08d0-429e-8780-bfdad7a29f30/5060e745-08d0-429e-8780-bfdad7a29f30.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1422.742252] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1422.742252] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9434d285-2058-4759-8437-62085eea0057 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.751035] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1422.751035] env[63241]: value = "task-1820037" [ 1422.751035] env[63241]: _type = "Task" [ 1422.751035] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.759477] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820037, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.893016] env[63241]: DEBUG oslo_vmware.api [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820036, 'name': PowerOnVM_Task, 'duration_secs': 0.594111} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.893353] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1422.893589] env[63241]: INFO nova.compute.manager [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Took 8.62 seconds to spawn the instance on the hypervisor. [ 1422.893807] env[63241]: DEBUG nova.compute.manager [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1422.894700] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46acd75-aed9-4db7-95b8-b9d5c24fb330 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.909851] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c8ab23-7920-6856-9ba8-ff40beffc296, 'name': SearchDatastore_Task, 'duration_secs': 0.043762} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.910368] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.910662] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1422.910811] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.910953] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.911141] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1422.911434] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bd2c83a-6357-4ef2-84dd-29c2ee144196 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.918805] env[63241]: INFO nova.compute.manager [-] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Took 1.34 seconds to deallocate network for instance. [ 1422.931232] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1422.933366] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1422.933366] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92a1cc31-d719-43e7-940e-606d47f95f8f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.942625] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1422.942625] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52820467-6874-3f81-c027-bb66de8d9f4d" [ 1422.942625] env[63241]: _type = "Task" [ 1422.942625] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.953188] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52820467-6874-3f81-c027-bb66de8d9f4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.999781] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8a74c162-a167-4625-bea7-602e06b22f5f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "11b1888e-95ec-4166-9219-0c38f8817dd4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.032s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.201781] env[63241]: DEBUG oslo_concurrency.lockutils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Acquiring lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.202036] env[63241]: DEBUG oslo_concurrency.lockutils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.223396] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "refresh_cache-f1c19f17-ce7c-481a-99fd-d0bb20f1520b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.223549] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquired lock "refresh_cache-f1c19f17-ce7c-481a-99fd-d0bb20f1520b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1423.223695] env[63241]: DEBUG nova.network.neutron [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1423.243091] env[63241]: DEBUG oslo_concurrency.lockutils [req-0b345f9e-2937-4abf-be10-5bea0d86ab41 req-78a720a8-ee3c-4617-8808-6c6955e76c31 service nova] Releasing lock "refresh_cache-40217405-dcba-48cf-9d92-4122390d9fa8" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.260823] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820037, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.270993} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.261097] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1423.261871] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76cf412-1752-44e7-825a-61cbb31f72c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.288500] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 5060e745-08d0-429e-8780-bfdad7a29f30/5060e745-08d0-429e-8780-bfdad7a29f30.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1423.291362] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f93f1731-005b-4503-881f-10a4c33941bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.314243] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1423.314243] env[63241]: value = "task-1820038" [ 1423.314243] env[63241]: _type = "Task" [ 1423.314243] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.322616] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820038, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.416932] env[63241]: INFO nova.compute.manager [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Took 44.31 seconds to build instance. [ 1423.428606] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.448996] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cc6035-deea-4e1b-9132-3bc5a24063d9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.457986] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52820467-6874-3f81-c027-bb66de8d9f4d, 'name': SearchDatastore_Task, 'duration_secs': 0.046111} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.460361] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6262d953-d8a9-428f-bb50-d6c5d16b5961 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.463056] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8115c298-f8de-4ee6-b503-a99d51c1c670 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.469393] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1423.469393] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c1417d-d1db-77bb-2005-8118232a16a7" [ 1423.469393] env[63241]: _type = "Task" [ 1423.469393] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.498659] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecacb8b-a7ae-44d5-ba09-f7d1beda1621 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.504720] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c1417d-d1db-77bb-2005-8118232a16a7, 'name': SearchDatastore_Task, 'duration_secs': 0.025126} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.505331] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.505615] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 40217405-dcba-48cf-9d92-4122390d9fa8/40217405-dcba-48cf-9d92-4122390d9fa8.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1423.505861] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8d488e2-6227-4383-a88b-7475a874b6f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.514172] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e48584-f5bc-4c3e-948e-a7eb181dc43b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.517131] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1423.517131] env[63241]: value = "task-1820039" [ 1423.517131] env[63241]: _type = "Task" [ 1423.517131] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.528619] env[63241]: DEBUG nova.compute.provider_tree [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1423.531880] env[63241]: DEBUG nova.compute.manager [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Received event network-vif-plugged-0546f3d6-e4c2-463d-8a45-8f00ad6722d1 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1423.534098] env[63241]: DEBUG oslo_concurrency.lockutils [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] Acquiring lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.534098] env[63241]: DEBUG oslo_concurrency.lockutils [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] Lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.534098] env[63241]: DEBUG oslo_concurrency.lockutils [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] Lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.534098] env[63241]: DEBUG nova.compute.manager [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] No waiting events found dispatching network-vif-plugged-0546f3d6-e4c2-463d-8a45-8f00ad6722d1 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1423.534098] env[63241]: WARNING nova.compute.manager [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Received unexpected event network-vif-plugged-0546f3d6-e4c2-463d-8a45-8f00ad6722d1 for instance with vm_state building and task_state spawning. [ 1423.534417] env[63241]: DEBUG nova.compute.manager [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Received event network-changed-0546f3d6-e4c2-463d-8a45-8f00ad6722d1 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1423.534417] env[63241]: DEBUG nova.compute.manager [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Refreshing instance network info cache due to event network-changed-0546f3d6-e4c2-463d-8a45-8f00ad6722d1. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1423.534417] env[63241]: DEBUG oslo_concurrency.lockutils [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] Acquiring lock "refresh_cache-f1c19f17-ce7c-481a-99fd-d0bb20f1520b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.536908] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820039, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.776748] env[63241]: DEBUG nova.network.neutron [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1423.824975] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.872838] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.872838] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.919298] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a7a87d95-84f4-4b3f-ad02-aa5bfe239403 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.641s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.031971] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820039, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.038420] env[63241]: DEBUG nova.scheduler.client.report [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1424.052172] env[63241]: DEBUG nova.network.neutron [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Updating instance_info_cache with network_info: [{"id": "0546f3d6-e4c2-463d-8a45-8f00ad6722d1", "address": "fa:16:3e:55:5a:8b", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0546f3d6-e4", "ovs_interfaceid": "0546f3d6-e4c2-463d-8a45-8f00ad6722d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.279173] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "e753da08-d4a5-4f17-85c8-154e843798c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.279419] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "e753da08-d4a5-4f17-85c8-154e843798c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.324064] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820038, 'name': 
ReconfigVM_Task, 'duration_secs': 0.737583} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.324355] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 5060e745-08d0-429e-8780-bfdad7a29f30/5060e745-08d0-429e-8780-bfdad7a29f30.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1424.325116] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd86b75e-809d-431a-8b85-100ae1a88176 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.333906] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1424.333906] env[63241]: value = "task-1820040" [ 1424.333906] env[63241]: _type = "Task" [ 1424.333906] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.342062] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820040, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.423031] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1424.531500] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820039, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.810436} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.531799] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 40217405-dcba-48cf-9d92-4122390d9fa8/40217405-dcba-48cf-9d92-4122390d9fa8.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1424.532026] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1424.532279] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d49a2657-c6cf-4b99-8b9a-75a229d69e60 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.539603] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1424.539603] env[63241]: value = "task-1820041" [ 1424.539603] env[63241]: _type = "Task" [ 1424.539603] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.550022] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.133s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.551898] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820041, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.553077] env[63241]: DEBUG oslo_concurrency.lockutils [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.077s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.553077] env[63241]: DEBUG nova.objects.instance [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lazy-loading 'resources' on Instance uuid eb506425-4ecc-44b7-afa4-0901fc60b04f {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1424.556182] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Releasing lock "refresh_cache-f1c19f17-ce7c-481a-99fd-d0bb20f1520b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.557022] env[63241]: DEBUG nova.compute.manager [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Instance network_info: |[{"id": "0546f3d6-e4c2-463d-8a45-8f00ad6722d1", "address": "fa:16:3e:55:5a:8b", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0546f3d6-e4", "ovs_interfaceid": "0546f3d6-e4c2-463d-8a45-8f00ad6722d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1424.557022] env[63241]: DEBUG oslo_concurrency.lockutils [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] Acquired lock "refresh_cache-f1c19f17-ce7c-481a-99fd-d0bb20f1520b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.557243] env[63241]: DEBUG nova.network.neutron [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Refreshing network info cache for port 0546f3d6-e4c2-463d-8a45-8f00ad6722d1 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.558022] 
env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:5a:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0546f3d6-e4c2-463d-8a45-8f00ad6722d1', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1424.565840] env[63241]: DEBUG oslo.service.loopingcall [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1424.569421] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1424.569892] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-875e0cdf-bfc2-4be5-8d2a-eb3b715aa94e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.585111] env[63241]: INFO nova.scheduler.client.report [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted allocations for instance 99eccbef-0e76-4532-af2f-5d74e563e1d2 [ 1424.591728] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1424.591728] env[63241]: value = "task-1820042" [ 1424.591728] env[63241]: _type = "Task" [ 1424.591728] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.600874] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820042, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.846306] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820040, 'name': Rename_Task, 'duration_secs': 0.166029} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.846588] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1424.846854] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71950e90-896a-48ac-a7a4-0b1c2ff990d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.853266] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1424.853266] env[63241]: value = "task-1820043" [ 1424.853266] env[63241]: _type = "Task" [ 1424.853266] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.863108] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820043, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.942267] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.051344] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068271} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.051676] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1425.052420] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16bc280-3e39-4f11-b3aa-b19698e823fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.078526] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 40217405-dcba-48cf-9d92-4122390d9fa8/40217405-dcba-48cf-9d92-4122390d9fa8.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1425.078853] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22173c74-ce55-4129-b0f9-4c0549b59709 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.094817] env[63241]: DEBUG nova.network.neutron [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Updated VIF entry in instance network info cache for port 0546f3d6-e4c2-463d-8a45-8f00ad6722d1. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1425.095183] env[63241]: DEBUG nova.network.neutron [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Updating instance_info_cache with network_info: [{"id": "0546f3d6-e4c2-463d-8a45-8f00ad6722d1", "address": "fa:16:3e:55:5a:8b", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0546f3d6-e4", "ovs_interfaceid": "0546f3d6-e4c2-463d-8a45-8f00ad6722d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.105794] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be8579df-ae84-4f46-9b80-330d1c0e685c tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "99eccbef-0e76-4532-af2f-5d74e563e1d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.312s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.111921] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820042, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.114254] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1425.114254] env[63241]: value = "task-1820044" [ 1425.114254] env[63241]: _type = "Task" [ 1425.114254] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.128613] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820044, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.365435] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820043, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.581673] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2beaa9c-5ecd-4fbc-b074-bc95d830d802 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.591256] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b456b8a-32b2-4aad-94a7-e2b152e5fdf9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.619222] env[63241]: DEBUG oslo_concurrency.lockutils [req-83605c18-d0bc-4cf5-bae4-f2705ff8db2f req-847dc218-e826-4ed0-b120-443f2b321716 service nova] Releasing lock "refresh_cache-f1c19f17-ce7c-481a-99fd-d0bb20f1520b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.626061] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec7bbfc-2a8b-406c-81e8-5795365f964a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.633487] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820042, 'name': CreateVM_Task, 'duration_secs': 0.580797} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.637919] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1425.638208] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820044, 'name': ReconfigVM_Task, 'duration_secs': 0.45821} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.638867] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.639047] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.639361] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1425.639607] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 40217405-dcba-48cf-9d92-4122390d9fa8/40217405-dcba-48cf-9d92-4122390d9fa8.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1425.641194] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86676f5-22bb-4a70-9c5c-273e68b3af7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.644758] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0d4b8dd-ed97-4401-a3dc-76b6213edbd0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.646422] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc8dda74-555d-4f5c-9be7-c0ee389b78fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.657242] env[63241]: DEBUG nova.compute.provider_tree [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1425.660382] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1425.660382] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e1e4d0-fd6b-da84-d2b1-61fdcab909b5" [ 1425.660382] env[63241]: _type = "Task" [ 1425.660382] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.660621] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1425.660621] env[63241]: value = "task-1820045" [ 1425.660621] env[63241]: _type = "Task" [ 1425.660621] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.673128] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820045, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.676318] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e1e4d0-fd6b-da84-d2b1-61fdcab909b5, 'name': SearchDatastore_Task, 'duration_secs': 0.010576} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.676616] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.676894] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1425.677152] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.677298] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.677477] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1425.677970] env[63241]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97777b3f-61ab-4909-8a2a-d786f6a86516 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.683189] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "d60c3a22-19fb-4826-be88-d0307810a079" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.683399] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "d60c3a22-19fb-4826-be88-d0307810a079" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.683594] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "d60c3a22-19fb-4826-be88-d0307810a079-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.683775] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "d60c3a22-19fb-4826-be88-d0307810a079-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.683935] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "d60c3a22-19fb-4826-be88-d0307810a079-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.685936] env[63241]: INFO nova.compute.manager [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Terminating instance [ 1425.689011] env[63241]: DEBUG nova.compute.manager [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1425.689211] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1425.689488] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1425.689647] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1425.690784] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8458b5-3bb2-4b9a-815d-973e158a493e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.693285] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe19ebdd-8c13-4784-b3da-ef3cb05dcaa4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.698743] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1425.698743] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525a1e44-c752-312d-a143-c6b2fc123f48" [ 1425.698743] env[63241]: _type = "Task" [ 1425.698743] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.700797] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1425.703569] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59eaed6c-5460-4ee2-ae92-9284bca33109 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.709853] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525a1e44-c752-312d-a143-c6b2fc123f48, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.806844] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1425.807212] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1425.807432] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Deleting the datastore file [datastore1] d60c3a22-19fb-4826-be88-d0307810a079 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.807863] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f1a029a-40e9-49a8-b932-25d0fbbbd11e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.813668] env[63241]: DEBUG oslo_vmware.api [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1425.813668] env[63241]: value = "task-1820047" [ 1425.813668] env[63241]: _type = "Task" [ 1425.813668] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.821365] env[63241]: DEBUG oslo_vmware.api [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.863730] env[63241]: DEBUG oslo_vmware.api [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820043, 'name': PowerOnVM_Task, 'duration_secs': 0.580228} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.864138] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1425.864385] env[63241]: INFO nova.compute.manager [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Took 8.88 seconds to spawn the instance on the hypervisor. 
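The lockutils entries above (acquiring and then releasing the "[datastore1] devstack-image-cache_base/...vmdk" lock while the cached image is processed, and the per-instance and "-events" locks taken around terminate_instance) all trace the same per-name critical-section pattern. The following is a minimal, hypothetical sketch of that pattern using only the Python standard library; named_lock and the lock registry are invented for illustration and are not the oslo.concurrency API.

# Illustrative only: a per-name lock registry resembling the
# "Acquiring/Acquired/Releasing lock" entries in the log above.
import threading
from contextlib import contextmanager

_locks = {}                        # lock name -> threading.Lock
_registry_guard = threading.Lock()

@contextmanager
def named_lock(name):
    """Serialize work on a shared resource (e.g. a cached image path)."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    lock.acquire()                 # corresponds to "acquired by ..."
    try:
        yield
    finally:
        lock.release()             # corresponds to '"released" by ...'

# Usage: only one worker at a time touches the cached base image.
with named_lock("[datastore1] devstack-image-cache_base/<image-id>.vmdk"):
    pass  # fetch or copy the VMDK here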
[ 1425.864662] env[63241]: DEBUG nova.compute.manager [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1425.865478] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7364363f-8917-45a3-99c2-e1033a8571d8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.162275] env[63241]: DEBUG nova.scheduler.client.report [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1426.174813] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820045, 'name': Rename_Task, 'duration_secs': 0.240985} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.175103] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1426.176091] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-089de5ea-9360-4a86-8f7b-53ae4ae749ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.181791] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1426.181791] env[63241]: value = "task-1820048" [ 1426.181791] env[63241]: _type = "Task" [ 1426.181791] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.190027] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820048, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.210403] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525a1e44-c752-312d-a143-c6b2fc123f48, 'name': SearchDatastore_Task, 'duration_secs': 0.031408} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.211201] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eabe1431-27ab-4103-a4bd-4ff454cfd57b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.216318] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1426.216318] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52de7c0b-a84b-843e-d4cb-ced46b7bc048" [ 1426.216318] env[63241]: _type = "Task" [ 1426.216318] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.224460] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52de7c0b-a84b-843e-d4cb-ced46b7bc048, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.324194] env[63241]: DEBUG oslo_vmware.api [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.389012] env[63241]: INFO nova.compute.manager [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Took 40.98 seconds to build instance. 
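The repeated "Waiting for the task: (returnval){...}", "Task: {...} progress is N%", and "completed successfully" entries above trace a poll-until-done loop around vCenter tasks (Rename_Task, SearchDatastore_Task, DeleteDatastoreFile_Task, PowerOnVM_Task). The sketch below is a standard-library-only illustration of that loop; TaskInfo, get_task_info, and the state names are hypothetical stand-ins, not the oslo.vmware or vSphere SDK API.

# Illustrative only: the poll-until-done loop behind the repeated
# "progress is N%" / "completed successfully" entries above.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str                 # "queued" | "running" | "success" | "error"
    progress: int = 0          # 0..100
    error: str | None = None

def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    """Poll a backend task until it finishes or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info                        # "... completed successfully"
        if info.state == "error":
            raise RuntimeError(info.error or "task failed")
        # Otherwise report progress and poll again, like _poll_task above.
        print(f"progress is {info.progress}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")

# Usage with a fake task that completes immediately:
wait_for_task(lambda: TaskInfo(state="success", progress=100))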
[ 1426.670256] env[63241]: DEBUG oslo_concurrency.lockutils [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.118s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.672578] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.590s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.674244] env[63241]: INFO nova.compute.claims [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1426.693483] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820048, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.694484] env[63241]: INFO nova.scheduler.client.report [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Deleted allocations for instance eb506425-4ecc-44b7-afa4-0901fc60b04f [ 1426.728452] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52de7c0b-a84b-843e-d4cb-ced46b7bc048, 'name': SearchDatastore_Task, 'duration_secs': 0.037958} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.728720] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.728978] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f1c19f17-ce7c-481a-99fd-d0bb20f1520b/f1c19f17-ce7c-481a-99fd-d0bb20f1520b.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1426.729279] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9f19993-1d1d-44ea-bb80-37f53ee88dd2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.735795] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1426.735795] env[63241]: value = "task-1820049" [ 1426.735795] env[63241]: _type = "Task" [ 1426.735795] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.745305] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.825690] env[63241]: DEBUG oslo_vmware.api [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.705609} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.826058] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1426.826321] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1426.826524] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1426.826704] env[63241]: INFO nova.compute.manager [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1426.827043] env[63241]: DEBUG oslo.service.loopingcall [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.827714] env[63241]: DEBUG nova.compute.manager [-] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1426.827714] env[63241]: DEBUG nova.network.neutron [-] [instance: d60c3a22-19fb-4826-be88-d0307810a079] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1426.890646] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bedaf620-2310-4bdf-a845-a31dc7e88928 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "5060e745-08d0-429e-8780-bfdad7a29f30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.887s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.192762] env[63241]: DEBUG oslo_vmware.api [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820048, 'name': PowerOnVM_Task, 'duration_secs': 0.845858} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.193545] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1427.193893] env[63241]: INFO nova.compute.manager [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Took 7.64 seconds to spawn the instance on the hypervisor. [ 1427.194198] env[63241]: DEBUG nova.compute.manager [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1427.195298] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f95cf3b-a848-4147-b0f8-4cf0de86c08e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.207027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-27efc188-b3ff-4bc6-a99f-7a5e1198ba73 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442 tempest-FloatingIPsAssociationNegativeTestJSON-1495889442-project-member] Lock "eb506425-4ecc-44b7-afa4-0901fc60b04f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.020s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.236100] env[63241]: DEBUG nova.compute.manager [req-a0c57bbc-2e55-4629-9f7d-74c5f391161f req-c18a6ff2-964a-444f-8433-7f61eb0e9869 service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Received event network-vif-deleted-047a4f6e-0c89-41ce-ab72-9a6d521031c4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1427.236255] env[63241]: INFO nova.compute.manager [req-a0c57bbc-2e55-4629-9f7d-74c5f391161f req-c18a6ff2-964a-444f-8433-7f61eb0e9869 service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Neutron deleted interface 047a4f6e-0c89-41ce-ab72-9a6d521031c4; detaching it from the instance and deleting it from the info cache [ 1427.236356] env[63241]: DEBUG nova.network.neutron [req-a0c57bbc-2e55-4629-9f7d-74c5f391161f req-c18a6ff2-964a-444f-8433-7f61eb0e9869 service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.253184] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820049, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.395244] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1427.706774] env[63241]: DEBUG nova.network.neutron [-] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.726134] env[63241]: INFO nova.compute.manager [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Took 40.20 seconds to build instance. [ 1427.742308] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-989bab2e-01e7-4a68-8533-102edcf9c885 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.753443] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820049, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542765} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.756915] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f1c19f17-ce7c-481a-99fd-d0bb20f1520b/f1c19f17-ce7c-481a-99fd-d0bb20f1520b.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1427.757149] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1427.757860] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e124cc5-80ff-4c63-bd99-b82952b92001 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.762684] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61cb000-aa38-4117-a4df-b3dd6a018f12 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.781150] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1427.781150] env[63241]: value = "task-1820050" [ 1427.781150] env[63241]: _type = "Task" [ 1427.781150] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.806051] env[63241]: DEBUG nova.compute.manager [req-a0c57bbc-2e55-4629-9f7d-74c5f391161f req-c18a6ff2-964a-444f-8433-7f61eb0e9869 service nova] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Detach interface failed, port_id=047a4f6e-0c89-41ce-ab72-9a6d521031c4, reason: Instance d60c3a22-19fb-4826-be88-d0307810a079 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1427.806164] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820050, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.895028] env[63241]: DEBUG nova.compute.manager [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1427.898284] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f98e61-e251-47ad-810e-18bcfdb3b558 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.915903] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.951251] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquiring lock "40217405-dcba-48cf-9d92-4122390d9fa8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.212687] env[63241]: INFO nova.compute.manager [-] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Took 1.39 seconds to deallocate network for instance. 
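The "Waiting for function ..._deallocate_network_with_retries to return" looping-call entry, followed by "Took 1.39 seconds to deallocate network for instance", reflects a retry wrapper around the Neutron cleanup call. The sketch below illustrates a generic retry-with-backoff wrapper of that shape; retry_call and its parameters are hypothetical and are not the oslo.service or Nova API.

# Illustrative only: a retry-with-backoff wrapper resembling the
# "_deallocate_network_with_retries" looping call traced above.
import time

def retry_call(func, attempts=3, initial_delay=1.0, backoff=2.0):
    """Call func(); on failure, wait and retry with exponential backoff."""
    delay = initial_delay
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except Exception as exc:               # sketch only: catch-all retry
            if attempt == attempts:
                raise                          # give up after the last attempt
            print(f"attempt {attempt} failed ({exc}); retrying in {delay:.1f}s")
            time.sleep(delay)
            delay *= backoff

# Usage: tolerate transient failures when deallocating instance networking.
retry_call(lambda: None)   # replace the lambda with the real cleanup call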
[ 1428.224035] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19aa43b-c1c4-46e9-9595-edfbdc292bc2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.228444] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2265e435-021d-4a78-9826-7e03b754012b tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "40217405-dcba-48cf-9d92-4122390d9fa8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.793s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.231555] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "40217405-dcba-48cf-9d92-4122390d9fa8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.280s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.231793] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquiring lock "40217405-dcba-48cf-9d92-4122390d9fa8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.232017] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "40217405-dcba-48cf-9d92-4122390d9fa8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.232193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "40217405-dcba-48cf-9d92-4122390d9fa8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.235116] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e38a1d-9bc2-4b4a-ad7f-cd236635262d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.239020] env[63241]: INFO nova.compute.manager [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Terminating instance [ 1428.269357] env[63241]: DEBUG nova.compute.manager [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1428.269612] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1428.271491] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b084a0-89dc-4f12-a556-725d914b1bf6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.275553] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb40b9c6-97e6-41db-86bc-5e11bcc76053 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.283821] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1428.286921] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4066437d-307a-4a7d-8983-d867c5be390a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.289301] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf328dc-410a-4ae0-ab70-60ffe2db208b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.298307] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820050, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063393} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.309105] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1428.309997] env[63241]: DEBUG nova.compute.provider_tree [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1428.311412] env[63241]: DEBUG oslo_vmware.api [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1428.311412] env[63241]: value = "task-1820051" [ 1428.311412] env[63241]: _type = "Task" [ 1428.311412] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.312243] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0faa90-cb06-4245-8e99-8ebbe878610d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.341973] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] f1c19f17-ce7c-481a-99fd-d0bb20f1520b/f1c19f17-ce7c-481a-99fd-d0bb20f1520b.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1428.343683] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dc9e5fd-070e-4a90-b034-53d68d1e1d12 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.358262] env[63241]: DEBUG oslo_vmware.api [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820051, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.364100] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1428.364100] env[63241]: value = "task-1820052" [ 1428.364100] env[63241]: _type = "Task" [ 1428.364100] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.374144] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820052, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.411204] env[63241]: INFO nova.compute.manager [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] instance snapshotting [ 1428.414039] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf772ba-b21e-4473-ba01-676f259e983b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.436445] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e82f12-a35c-4d1c-ad52-5b605b8bd4af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.719296] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.735262] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1428.828142] env[63241]: DEBUG oslo_vmware.api [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820051, 'name': PowerOffVM_Task, 'duration_secs': 0.452519} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.828399] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1428.828566] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1428.828818] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3da4465-9385-4881-9212-26c9fee696bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.835475] env[63241]: ERROR nova.scheduler.client.report [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [req-44b6b6f0-c844-4b75-9c02-d34bfb4fba42] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-44b6b6f0-c844-4b75-9c02-d34bfb4fba42"}]} [ 1428.852240] env[63241]: DEBUG nova.scheduler.client.report [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1428.869838] env[63241]: DEBUG nova.scheduler.client.report [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1428.870094] env[63241]: DEBUG nova.compute.provider_tree [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1428.875334] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820052, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.883977] env[63241]: DEBUG nova.scheduler.client.report [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1428.901925] env[63241]: DEBUG nova.scheduler.client.report [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1428.927495] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1428.927720] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1428.927935] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Deleting the datastore file [datastore1] 40217405-dcba-48cf-9d92-4122390d9fa8 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1428.928224] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef9d1f6f-75c8-48da-82b6-e852ac3f22bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.938021] env[63241]: DEBUG oslo_vmware.api [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for the task: (returnval){ [ 1428.938021] env[63241]: value = "task-1820054" [ 1428.938021] env[63241]: _type = "Task" [ 1428.938021] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.945116] env[63241]: DEBUG oslo_vmware.api [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820054, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.957476] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1428.957799] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5cb0becb-2913-4d35-b847-b504d55c4195 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.968545] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1428.968545] env[63241]: value = "task-1820055" [ 1428.968545] env[63241]: _type = "Task" [ 1428.968545] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.978453] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820055, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.256201] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.377055] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820052, 'name': ReconfigVM_Task, 'duration_secs': 0.650494} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.377055] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Reconfigured VM instance instance-0000001c to attach disk [datastore1] f1c19f17-ce7c-481a-99fd-d0bb20f1520b/f1c19f17-ce7c-481a-99fd-d0bb20f1520b.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1429.377055] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a18136ec-4939-49b0-8783-051431ee3855 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.382320] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff4988d-c43e-4fbb-bba6-6f7e8d82cc38 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.385782] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1429.385782] env[63241]: value = "task-1820056" [ 1429.385782] env[63241]: _type = "Task" [ 1429.385782] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.393832] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820056, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.396369] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3204cf0-6d35-4f6d-98d9-394cd7ef0920 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.427728] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a593176-353a-47ea-b12f-bcca669ff208 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.435600] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25263e87-7dd4-46a5-a658-6a0e154b827a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.452168] env[63241]: DEBUG nova.compute.provider_tree [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1429.456405] env[63241]: DEBUG oslo_vmware.api [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Task: {'id': task-1820054, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253238} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.456895] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1429.457105] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1429.457289] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1429.457458] env[63241]: INFO nova.compute.manager [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1429.457693] env[63241]: DEBUG oslo.service.loopingcall [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.457886] env[63241]: DEBUG nova.compute.manager [-] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1429.457998] env[63241]: DEBUG nova.network.neutron [-] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1429.478785] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820055, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.817053] env[63241]: DEBUG nova.compute.manager [req-b6f4b966-8a68-49a8-892b-64072f44085b req-5561f6ef-71db-423b-b6d6-2db11c230f4e service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Received event network-vif-deleted-30a83f5f-b061-401d-84a9-eb0eefd0af82 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1429.817323] env[63241]: INFO nova.compute.manager [req-b6f4b966-8a68-49a8-892b-64072f44085b req-5561f6ef-71db-423b-b6d6-2db11c230f4e service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Neutron deleted interface 30a83f5f-b061-401d-84a9-eb0eefd0af82; detaching it from the instance and deleting it from the info cache [ 1429.817531] env[63241]: DEBUG nova.network.neutron [req-b6f4b966-8a68-49a8-892b-64072f44085b req-5561f6ef-71db-423b-b6d6-2db11c230f4e service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.896909] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820056, 'name': Rename_Task, 'duration_secs': 0.178925} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.897213] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1429.897454] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54b33c1f-ad49-40e5-94be-9b915a5f9364 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.907042] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1429.907042] env[63241]: value = "task-1820057" [ 1429.907042] env[63241]: _type = "Task" [ 1429.907042] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.914646] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820057, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.958733] env[63241]: DEBUG nova.scheduler.client.report [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1429.981028] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820055, 'name': CreateSnapshot_Task, 'duration_secs': 0.618367} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.981028] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1429.981353] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11f46e6-3048-43b4-8673-79c4b9712885 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.273573] env[63241]: DEBUG nova.network.neutron [-] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.320439] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12f6c6da-cd17-493f-9115-fe86ebfb8b4b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.330135] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a6b8a7-4201-40e0-b4fe-8d350571e3f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.366974] env[63241]: DEBUG nova.compute.manager [req-b6f4b966-8a68-49a8-892b-64072f44085b req-5561f6ef-71db-423b-b6d6-2db11c230f4e service nova] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Detach interface failed, port_id=30a83f5f-b061-401d-84a9-eb0eefd0af82, reason: Instance 40217405-dcba-48cf-9d92-4122390d9fa8 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1430.418331] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820057, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.464421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.792s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.465019] env[63241]: DEBUG nova.compute.manager [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1430.467827] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.879s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.470026] env[63241]: INFO nova.compute.claims [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1430.499509] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1430.500429] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9a52acb8-03c5-4b71-9863-192105f0f400 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.509974] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1430.509974] env[63241]: value = "task-1820058" [ 1430.509974] env[63241]: _type = "Task" [ 1430.509974] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.519539] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820058, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.778021] env[63241]: INFO nova.compute.manager [-] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Took 1.32 seconds to deallocate network for instance. 
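The oslo_concurrency.lockutils records above (for example, Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.879s, then "released" ... held 3.792s) are emitted by oslo.concurrency's synchronized decorator wrapped around the resource tracker's claim path. As a rough standalone sketch only (the function body and names below are illustrative, not Nova code), the same acquired/waited/released DEBUG lines can be reproduced like this, assuming oslo.concurrency is installed:

    # Rough sketch, not Nova code: reproduces the DEBUG pattern
    # 'Lock "compute_resources" acquired by "..." :: waited N.NNNs'
    # seen in the surrounding records.
    import logging
    import threading
    import time

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)  # lockutils logs acquire/release at DEBUG

    @lockutils.synchronized("compute_resources")
    def instance_claim(instance_id):
        # Critical section: only one caller may update the tracked state at a
        # time, so later callers accumulate "waited" time, as in the log above.
        time.sleep(0.5)
        return instance_id

    threads = [threading.Thread(target=instance_claim, args=(i,)) for i in range(3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

The long "waited 26.879s" value in the record above simply means other claims and usage updates were queued behind the same per-process "compute_resources" lock while an earlier holder finished; the subsequent "held 3.792s" line is the same decorator logging the release.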
[ 1430.918641] env[63241]: DEBUG oslo_vmware.api [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820057, 'name': PowerOnVM_Task, 'duration_secs': 0.850919} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.918913] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1430.918913] env[63241]: INFO nova.compute.manager [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Took 8.67 seconds to spawn the instance on the hypervisor. [ 1430.919097] env[63241]: DEBUG nova.compute.manager [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1430.919862] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093cc2aa-b5bc-4a4f-9775-fa9f4a2f772c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.974352] env[63241]: DEBUG nova.compute.utils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1430.975843] env[63241]: DEBUG nova.compute.manager [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1430.977013] env[63241]: DEBUG nova.network.neutron [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1431.023142] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820058, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.053848] env[63241]: DEBUG nova.policy [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54dc853b6f204a75ae7612f9fbd2d1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecafb0abbdc74501b22b20b797c4c60c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1431.310315] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.438885] env[63241]: INFO nova.compute.manager [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Took 41.22 seconds to build instance. [ 1431.479406] env[63241]: DEBUG nova.compute.manager [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1431.528265] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820058, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.566416] env[63241]: DEBUG nova.network.neutron [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Successfully created port: 2e94bb05-0411-4916-b14d-3c2ebc9dfccd {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1431.947665] env[63241]: DEBUG oslo_concurrency.lockutils [None req-68a9302a-33d7-4780-a872-19ec065edeca tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.019s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.029040] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820058, 'name': CloneVM_Task, 'duration_secs': 1.50225} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.029535] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Created linked-clone VM from snapshot [ 1432.030918] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b47dac-7ad7-41d4-a0f9-41aa0e0c12fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.036467] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8699cd4c-1eb3-4072-8ab2-e6ea046fd52f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.044859] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Uploading image 3e6badee-21cf-44ac-a19a-274665d3cec2 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1432.050887] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6c6054-cba8-4f12-9591-4ef1fdd8afec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.088986] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77996405-6ca5-4b27-97d3-39f4322815e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.098797] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1432.098797] env[63241]: value = "vm-377014" [ 1432.098797] env[63241]: _type = "VirtualMachine" [ 1432.098797] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1432.100042] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb5b86e-8a6a-4235-94b6-d2833de59da9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.104256] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-668beed9-45e6-4388-95ad-cd98f506b28b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.119452] env[63241]: DEBUG nova.compute.provider_tree [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.122979] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lease: (returnval){ [ 1432.122979] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529a4037-4bc7-e6fb-4f29-720cc0c71ca4" [ 1432.122979] env[63241]: _type = "HttpNfcLease" [ 1432.122979] env[63241]: } obtained for exporting VM: (result){ [ 1432.122979] env[63241]: value = "vm-377014" [ 1432.122979] env[63241]: _type = "VirtualMachine" [ 1432.122979] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1432.123248] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the lease: (returnval){ [ 1432.123248] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529a4037-4bc7-e6fb-4f29-720cc0c71ca4" [ 1432.123248] env[63241]: _type = "HttpNfcLease" [ 1432.123248] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1432.129794] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1432.129794] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529a4037-4bc7-e6fb-4f29-720cc0c71ca4" [ 1432.129794] env[63241]: _type = "HttpNfcLease" [ 1432.129794] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1432.130053] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1432.130053] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529a4037-4bc7-e6fb-4f29-720cc0c71ca4" [ 1432.130053] env[63241]: _type = "HttpNfcLease" [ 1432.130053] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1432.130742] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b52cae2-b28f-4014-b219-13ac931251f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.138304] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52320396-29dd-06f8-786e-e16a5e4a33d7/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1432.138474] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52320396-29dd-06f8-786e-e16a5e4a33d7/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1432.229988] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f602886a-20a1-48ff-81df-11cbd6bcb369 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.450020] env[63241]: DEBUG nova.compute.manager [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1432.493627] env[63241]: DEBUG nova.compute.manager [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1432.522021] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1432.522426] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1432.522722] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1432.522919] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1432.524031] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1432.524031] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1432.524031] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1432.524031] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1432.524031] 
env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1432.524231] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1432.524231] env[63241]: DEBUG nova.virt.hardware [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1432.525406] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282f73d5-9ad8-4018-9abf-8a63d9dbf60b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.534354] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903c3dc3-67e1-4415-9f11-520bf55a2462 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.625258] env[63241]: DEBUG nova.scheduler.client.report [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1432.915046] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.915330] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.917102] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.917552] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.917552] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.920285] env[63241]: INFO nova.compute.manager [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Terminating instance [ 1432.922796] env[63241]: DEBUG nova.compute.manager [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1432.923162] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1432.924187] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05345ec-88cc-4a6f-9051-deae7c453fd9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.932727] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1432.933301] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-acf8e98f-e88e-4619-9a80-5acde03002d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.940599] env[63241]: DEBUG oslo_vmware.api [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1432.940599] env[63241]: value = "task-1820060" [ 1432.940599] env[63241]: _type = "Task" [ 1432.940599] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.949281] env[63241]: DEBUG oslo_vmware.api [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.974477] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.132758] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.133329] env[63241]: DEBUG nova.compute.manager [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1433.137149] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.076s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.138786] env[63241]: INFO nova.compute.claims [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1433.233032] env[63241]: DEBUG nova.compute.manager [req-4dcdbfcc-1747-4e8f-a6fd-4a2c532ea591 req-937c0eee-0328-4859-8029-3fd9c02f4eae service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Received event network-vif-plugged-2e94bb05-0411-4916-b14d-3c2ebc9dfccd {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1433.233433] env[63241]: DEBUG oslo_concurrency.lockutils [req-4dcdbfcc-1747-4e8f-a6fd-4a2c532ea591 req-937c0eee-0328-4859-8029-3fd9c02f4eae service nova] Acquiring lock "27177719-5090-43de-9bca-6db6bebab7b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.233721] env[63241]: DEBUG oslo_concurrency.lockutils [req-4dcdbfcc-1747-4e8f-a6fd-4a2c532ea591 req-937c0eee-0328-4859-8029-3fd9c02f4eae service nova] Lock "27177719-5090-43de-9bca-6db6bebab7b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.234061] env[63241]: DEBUG oslo_concurrency.lockutils [req-4dcdbfcc-1747-4e8f-a6fd-4a2c532ea591 req-937c0eee-0328-4859-8029-3fd9c02f4eae service nova] Lock "27177719-5090-43de-9bca-6db6bebab7b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.234269] env[63241]: DEBUG nova.compute.manager [req-4dcdbfcc-1747-4e8f-a6fd-4a2c532ea591 req-937c0eee-0328-4859-8029-3fd9c02f4eae service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] No waiting events found dispatching network-vif-plugged-2e94bb05-0411-4916-b14d-3c2ebc9dfccd {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1433.234446] env[63241]: WARNING nova.compute.manager [req-4dcdbfcc-1747-4e8f-a6fd-4a2c532ea591 req-937c0eee-0328-4859-8029-3fd9c02f4eae service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Received unexpected event network-vif-plugged-2e94bb05-0411-4916-b14d-3c2ebc9dfccd for instance with vm_state building and task_state spawning. [ 1433.315784] env[63241]: DEBUG nova.network.neutron [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Successfully updated port: 2e94bb05-0411-4916-b14d-3c2ebc9dfccd {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1433.450781] env[63241]: DEBUG oslo_vmware.api [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820060, 'name': PowerOffVM_Task, 'duration_secs': 0.271516} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.451080] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1433.451954] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1433.452255] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-840bf616-d1a7-4bf3-980c-46b98b2af38c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.534991] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1433.535248] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1433.535465] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Deleting the datastore file [datastore1] f1c19f17-ce7c-481a-99fd-d0bb20f1520b {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1433.535688] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c36250b8-c800-4c47-b2b4-d0b18768222a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.541728] env[63241]: DEBUG oslo_vmware.api [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for the task: (returnval){ [ 1433.541728] env[63241]: value = "task-1820062" [ 1433.541728] env[63241]: _type = "Task" [ 1433.541728] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.550744] env[63241]: DEBUG oslo_vmware.api [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820062, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.644403] env[63241]: DEBUG nova.compute.utils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1433.647391] env[63241]: DEBUG nova.compute.manager [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1433.647672] env[63241]: DEBUG nova.network.neutron [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1433.730532] env[63241]: DEBUG nova.policy [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb65db44f8304f9fbdd3106554e0cf98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b914905bfcb64d31bae51b7236dc5b57', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1433.822282] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.822282] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.822282] env[63241]: DEBUG nova.network.neutron [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1434.052299] env[63241]: DEBUG oslo_vmware.api [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Task: {'id': task-1820062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268042} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.052673] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1434.052932] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1434.053194] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1434.053400] env[63241]: INFO nova.compute.manager [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1434.053703] env[63241]: DEBUG oslo.service.loopingcall [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1434.054008] env[63241]: DEBUG nova.compute.manager [-] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1434.054114] env[63241]: DEBUG nova.network.neutron [-] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1434.148485] env[63241]: DEBUG nova.compute.manager [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1434.399917] env[63241]: DEBUG nova.network.neutron [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1434.494481] env[63241]: DEBUG nova.network.neutron [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Successfully created port: bd711358-5fa1-44eb-afea-adfbb937a52a {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1434.671487] env[63241]: DEBUG nova.compute.manager [req-167b7cfe-4c28-48e6-a38f-3e33c1d25aaf req-9cfa96e3-9cf1-4c41-8757-89e5f33dcef6 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Received event network-vif-deleted-0546f3d6-e4c2-463d-8a45-8f00ad6722d1 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1434.671487] env[63241]: INFO nova.compute.manager [req-167b7cfe-4c28-48e6-a38f-3e33c1d25aaf req-9cfa96e3-9cf1-4c41-8757-89e5f33dcef6 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Neutron deleted interface 0546f3d6-e4c2-463d-8a45-8f00ad6722d1; detaching it from the instance and deleting it from the info cache [ 1434.671487] env[63241]: DEBUG nova.network.neutron [req-167b7cfe-4c28-48e6-a38f-3e33c1d25aaf req-9cfa96e3-9cf1-4c41-8757-89e5f33dcef6 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.830796] env[63241]: DEBUG nova.network.neutron [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updating instance_info_cache with network_info: [{"id": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "address": "fa:16:3e:fd:57:de", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e94bb05-04", "ovs_interfaceid": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.878058] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e00844e-28be-495a-b05e-017de20ebe08 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.887676] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70052735-d4d5-489d-a5c0-2879645e2bb2 {{(pid=63241) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.923273] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e6b95d-5fed-468f-adbd-b25609865c9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.931615] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab1f363-7199-426b-a979-9ce16dbacd92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.946162] env[63241]: DEBUG nova.compute.provider_tree [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.048380] env[63241]: DEBUG nova.network.neutron [-] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.172758] env[63241]: DEBUG nova.compute.manager [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1435.175478] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ff7634b-638a-45ca-a972-fd5b0cb12ef6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.186306] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c829b6a3-4c4b-4c4a-9ff0-3110211110a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.209570] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1435.209858] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 1435.210633] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1435.210892] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1435.213922] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1435.213922] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1435.213922] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1435.213922] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1435.213922] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1435.214261] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1435.214261] env[63241]: DEBUG nova.virt.hardware [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1435.214261] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9fffcf-ddd8-4153-a88f-00d8e7beae3a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.230693] env[63241]: DEBUG nova.compute.manager [req-167b7cfe-4c28-48e6-a38f-3e33c1d25aaf req-9cfa96e3-9cf1-4c41-8757-89e5f33dcef6 service nova] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] 
Detach interface failed, port_id=0546f3d6-e4c2-463d-8a45-8f00ad6722d1, reason: Instance f1c19f17-ce7c-481a-99fd-d0bb20f1520b could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1435.236636] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d93786a-3675-46c4-98ed-f908d864882b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.279675] env[63241]: DEBUG nova.compute.manager [req-25062e69-fdd1-4ec9-b68e-6a14009a8bd4 req-11ebeeb3-3514-44c3-acfa-56c73f0b881e service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Received event network-changed-2e94bb05-0411-4916-b14d-3c2ebc9dfccd {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1435.279876] env[63241]: DEBUG nova.compute.manager [req-25062e69-fdd1-4ec9-b68e-6a14009a8bd4 req-11ebeeb3-3514-44c3-acfa-56c73f0b881e service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Refreshing instance network info cache due to event network-changed-2e94bb05-0411-4916-b14d-3c2ebc9dfccd. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1435.280113] env[63241]: DEBUG oslo_concurrency.lockutils [req-25062e69-fdd1-4ec9-b68e-6a14009a8bd4 req-11ebeeb3-3514-44c3-acfa-56c73f0b881e service nova] Acquiring lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.337781] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.338221] env[63241]: DEBUG nova.compute.manager [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Instance network_info: |[{"id": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "address": "fa:16:3e:fd:57:de", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e94bb05-04", "ovs_interfaceid": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1435.338584] env[63241]: 
DEBUG oslo_concurrency.lockutils [req-25062e69-fdd1-4ec9-b68e-6a14009a8bd4 req-11ebeeb3-3514-44c3-acfa-56c73f0b881e service nova] Acquired lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.338774] env[63241]: DEBUG nova.network.neutron [req-25062e69-fdd1-4ec9-b68e-6a14009a8bd4 req-11ebeeb3-3514-44c3-acfa-56c73f0b881e service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Refreshing network info cache for port 2e94bb05-0411-4916-b14d-3c2ebc9dfccd {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1435.340160] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:57:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e94bb05-0411-4916-b14d-3c2ebc9dfccd', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1435.348526] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Creating folder: Project (ecafb0abbdc74501b22b20b797c4c60c). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1435.352139] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12bd4145-2634-454c-942a-de91d289d104 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.363904] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Created folder: Project (ecafb0abbdc74501b22b20b797c4c60c) in parent group-v376927. [ 1435.364219] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Creating folder: Instances. Parent ref: group-v377015. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1435.364515] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4e83914-6d23-425c-a2d7-e3eadbd74917 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.374896] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Created folder: Instances in parent group-v377015. [ 1435.375194] env[63241]: DEBUG oslo.service.loopingcall [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
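The oslo.service line just above, "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return", comes from the looping-call helper that re-invokes a callable until it signals completion. A minimal, hedged sketch of that polling pattern with oslo.service follows; the callable, interval, and names are illustrative, not Nova's actual wiring.

    # Illustrative sketch only: poll a callable on a fixed interval until it
    # signals completion, roughly the pattern behind the "Waiting for function
    # ... to return" debug line. Names here are examples, not Nova internals.
    from oslo_service import loopingcall

    def _poll(check_done):
        if check_done():
            raise loopingcall.LoopingCallDone()

    def wait_for(check_done, interval=0.5):
        timer = loopingcall.FixedIntervalLoopingCall(_poll, check_done)
        timer.start(interval=interval).wait()

    # Hypothetical usage: wait_for(lambda: vm_create_finished())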
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1435.375427] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1435.375675] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7474e01-a4b2-4283-8565-7f68605fd880 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.396181] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1435.396181] env[63241]: value = "task-1820065" [ 1435.396181] env[63241]: _type = "Task" [ 1435.396181] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.403922] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820065, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.449211] env[63241]: DEBUG nova.scheduler.client.report [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1435.550820] env[63241]: INFO nova.compute.manager [-] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Took 1.50 seconds to deallocate network for instance. [ 1435.739222] env[63241]: DEBUG nova.network.neutron [req-25062e69-fdd1-4ec9-b68e-6a14009a8bd4 req-11ebeeb3-3514-44c3-acfa-56c73f0b881e service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updated VIF entry in instance network info cache for port 2e94bb05-0411-4916-b14d-3c2ebc9dfccd. 
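The neutron entries around here refresh the per-instance network_info cache when a network-changed event arrives and rewrite the one VIF whose port id matches. A hypothetical, simplified sketch of that update-by-port-id step, using plain dicts rather than Nova's InstanceInfoCache object:

    # Hypothetical miniature of "Updated VIF entry in instance network info
    # cache for port <id>": replace the cached VIF with the same id, or append.
    def update_vif_entry(cached_network_info, refreshed_vif):
        port_id = refreshed_vif["id"]
        for i, vif in enumerate(cached_network_info):
            if vif.get("id") == port_id:
                cached_network_info[i] = refreshed_vif
                return cached_network_info
        cached_network_info.append(refreshed_vif)
        return cached_network_info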
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1435.739592] env[63241]: DEBUG nova.network.neutron [req-25062e69-fdd1-4ec9-b68e-6a14009a8bd4 req-11ebeeb3-3514-44c3-acfa-56c73f0b881e service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updating instance_info_cache with network_info: [{"id": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "address": "fa:16:3e:fd:57:de", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e94bb05-04", "ovs_interfaceid": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.905964] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820065, 'name': CreateVM_Task, 'duration_secs': 0.389546} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.906171] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1435.906870] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.907135] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.907460] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1435.907733] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e682c219-e0ac-41d7-b146-8c3286ff01d5 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.912611] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1435.912611] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e4139d-328e-29be-dd58-c99be4feecef" [ 1435.912611] env[63241]: _type = "Task" [ 1435.912611] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.920923] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e4139d-328e-29be-dd58-c99be4feecef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.955019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.818s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.955629] env[63241]: DEBUG nova.compute.manager [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1435.958697] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.354s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.958959] env[63241]: DEBUG nova.objects.instance [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lazy-loading 'resources' on Instance uuid fe8eaeee-56b2-4974-a448-8f95848b3b3a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1436.058115] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.242246] env[63241]: DEBUG oslo_concurrency.lockutils [req-25062e69-fdd1-4ec9-b68e-6a14009a8bd4 req-11ebeeb3-3514-44c3-acfa-56c73f0b881e service nova] Releasing lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.422930] env[63241]: DEBUG nova.network.neutron [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 
tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Successfully updated port: bd711358-5fa1-44eb-afea-adfbb937a52a {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1436.432813] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e4139d-328e-29be-dd58-c99be4feecef, 'name': SearchDatastore_Task, 'duration_secs': 0.012958} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.433155] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.433397] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1436.433658] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.435418] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.435418] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1436.435604] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c2d42ee-507d-46e1-a234-19c2ec058816 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.445487] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1436.445686] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 
tempest-AttachInterfacesTestJSON-1054157255-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1436.446517] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d46d6dd9-cf07-430e-8861-bcddee19fcf9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.455577] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1436.455577] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f7550e-17ec-af71-3247-45b86fb3c97d" [ 1436.455577] env[63241]: _type = "Task" [ 1436.455577] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.468150] env[63241]: DEBUG nova.compute.utils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1436.469727] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f7550e-17ec-af71-3247-45b86fb3c97d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.470051] env[63241]: DEBUG nova.compute.manager [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Not allocating networking since 'none' was specified. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1436.927035] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquiring lock "refresh_cache-e2758650-2762-49f6-a678-f55425a89994" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.927274] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquired lock "refresh_cache-e2758650-2762-49f6-a678-f55425a89994" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.927522] env[63241]: DEBUG nova.network.neutron [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1436.934093] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522cc225-6f7e-4e73-ac70-c3158ce563f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.942954] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5448e2a8-c89a-4631-bc4c-934859693a67 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.976217] env[63241]: DEBUG nova.compute.manager [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1436.983666] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc42208-e118-4e97-8035-578c5bc477c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.993515] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f7550e-17ec-af71-3247-45b86fb3c97d, 'name': SearchDatastore_Task, 'duration_secs': 0.013476} completed successfully. 
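The SearchDatastore_Task entries show oslo.vmware's usual call-then-poll cycle: invoke a vSphere task method through the session, then block in wait_for_task while the "_poll_task ... progress is X%" lines tick by. A hedged sketch of that cycle; the managed object reference and connection parameters are placeholders, and keyword names may vary slightly between oslo.vmware releases.

    # Sketch of the invoke-then-wait cycle visible in the log; browser_ref is a
    # placeholder HostDatastoreBrowser managed object reference.
    def search_datastore(session, browser_ref,
                         ds_path="[datastore1] devstack-image-cache_base"):
        task_ref = session.invoke_api(session.vim, "SearchDatastore_Task",
                                      browser_ref, datastorePath=ds_path)
        return session.wait_for_task(task_ref)

    # Hypothetical session setup:
    # from oslo_vmware import api as vmware_api
    # session = vmware_api.VMwareAPISession("vc.example.test", "user", "secret",
    #                                       10, 0.5)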
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.996776] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62c0449c-32e6-4b6a-93f7-3706260de479 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.000116] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf76f81d-c342-49e9-9ccd-f52b32702cc0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.007388] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1437.007388] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5267ff43-7a7f-9029-2238-6eb64bc5b448" [ 1437.007388] env[63241]: _type = "Task" [ 1437.007388] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.015576] env[63241]: DEBUG nova.compute.provider_tree [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1437.028185] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5267ff43-7a7f-9029-2238-6eb64bc5b448, 'name': SearchDatastore_Task, 'duration_secs': 0.01376} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.028435] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.028682] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 27177719-5090-43de-9bca-6db6bebab7b4/27177719-5090-43de-9bca-6db6bebab7b4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1437.028930] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dda4b34a-259b-41f7-8f25-2869cfdb2b09 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.035983] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1437.035983] env[63241]: value = "task-1820066" [ 1437.035983] env[63241]: _type = "Task" [ 1437.035983] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.043817] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820066, 'name': CopyVirtualDisk_Task} progress is 0%. 
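The CopyVirtualDisk_Task followed later by ExtendVirtualDisk_Task is how the cached base VMDK becomes the instance's root disk and is then grown to the flavor's root size. A hedged sketch of those two VirtualDiskManager calls; disk_mgr and datacenter are placeholder references, and parameter spellings follow the vSphere SDK as commonly documented.

    # Sketch of "copy the cached image, then extend the copy"; disk_mgr and
    # datacenter are placeholder managed object references.
    def clone_and_extend_root_disk(session, disk_mgr, datacenter,
                                   src, dest, new_capacity_kb):
        copy_task = session.invoke_api(session.vim, "CopyVirtualDisk_Task",
                                       disk_mgr, sourceName=src,
                                       sourceDatacenter=datacenter,
                                       destName=dest, destDatacenter=datacenter)
        session.wait_for_task(copy_task)
        extend_task = session.invoke_api(session.vim, "ExtendVirtualDisk_Task",
                                         disk_mgr, name=dest,
                                         datacenter=datacenter,
                                         newCapacityKb=new_capacity_kb,
                                         eagerZero=False)
        session.wait_for_task(extend_task)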
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.315778] env[63241]: DEBUG nova.compute.manager [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] [instance: e2758650-2762-49f6-a678-f55425a89994] Received event network-vif-plugged-bd711358-5fa1-44eb-afea-adfbb937a52a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1437.316051] env[63241]: DEBUG oslo_concurrency.lockutils [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] Acquiring lock "e2758650-2762-49f6-a678-f55425a89994-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.316254] env[63241]: DEBUG oslo_concurrency.lockutils [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] Lock "e2758650-2762-49f6-a678-f55425a89994-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.316415] env[63241]: DEBUG oslo_concurrency.lockutils [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] Lock "e2758650-2762-49f6-a678-f55425a89994-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.316600] env[63241]: DEBUG nova.compute.manager [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] [instance: e2758650-2762-49f6-a678-f55425a89994] No waiting events found dispatching network-vif-plugged-bd711358-5fa1-44eb-afea-adfbb937a52a {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1437.316734] env[63241]: WARNING nova.compute.manager [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] [instance: e2758650-2762-49f6-a678-f55425a89994] Received unexpected event network-vif-plugged-bd711358-5fa1-44eb-afea-adfbb937a52a for instance with vm_state building and task_state spawning. [ 1437.316900] env[63241]: DEBUG nova.compute.manager [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] [instance: e2758650-2762-49f6-a678-f55425a89994] Received event network-changed-bd711358-5fa1-44eb-afea-adfbb937a52a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1437.317093] env[63241]: DEBUG nova.compute.manager [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] [instance: e2758650-2762-49f6-a678-f55425a89994] Refreshing instance network info cache due to event network-changed-bd711358-5fa1-44eb-afea-adfbb937a52a. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1437.317371] env[63241]: DEBUG oslo_concurrency.lockutils [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] Acquiring lock "refresh_cache-e2758650-2762-49f6-a678-f55425a89994" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.475152] env[63241]: DEBUG nova.network.neutron [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1437.521805] env[63241]: DEBUG nova.scheduler.client.report [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1437.547651] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820066, 'name': CopyVirtualDisk_Task} progress is 51%. 
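The network-vif-plugged / network-changed traffic here is Neutron delivering external events that the compute manager matches against waiters keyed by event name and port id; nothing is waiting for this one, hence the "Received unexpected event ..." warning. A hypothetical, much-reduced sketch of that keyed-waiter bookkeeping (Nova's real version lives in nova.compute.manager.InstanceEvents, referenced by the lock names above):

    # Hypothetical miniature of per-instance event plumbing: receivers register
    # a waiter keyed by (event_name, tag); the delivery path pops it, and an
    # unmatched delivery is the "unexpected event" case from the log.
    import threading
    from collections import defaultdict

    class InstanceEventWaiters:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)  # instance uuid -> {key: Event}

        def prepare(self, instance_uuid, event_key):
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_key] = ev
            return ev

        def pop(self, instance_uuid, event_key):
            with self._lock:
                return self._waiters[instance_uuid].pop(event_key, None)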
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.660573] env[63241]: DEBUG nova.network.neutron [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Updating instance_info_cache with network_info: [{"id": "bd711358-5fa1-44eb-afea-adfbb937a52a", "address": "fa:16:3e:a0:57:e0", "network": {"id": "224f885e-4de5-43f1-812c-eb20a195a19a", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-290379587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b914905bfcb64d31bae51b7236dc5b57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd711358-5f", "ovs_interfaceid": "bd711358-5fa1-44eb-afea-adfbb937a52a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.988100] env[63241]: DEBUG nova.compute.manager [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1438.012525] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1438.012777] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1438.012931] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1438.013132] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1438.013285] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1438.013430] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1438.013640] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1438.013797] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1438.013961] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 
tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1438.014138] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1438.014311] env[63241]: DEBUG nova.virt.hardware [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1438.015243] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46adb73a-0449-456e-a38e-df051a87b3b4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.023832] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf868d3-cb00-46e6-8f8b-8e64d6af75e5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.031102] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.070s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.031102] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.534s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.031897] env[63241]: INFO nova.compute.claims [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1438.048094] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1438.053819] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Creating folder: Project (90844013be3444e187bac99366b5045e). Parent ref: group-v376927. 
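Every build in this section walks the same nova.virt.hardware path: with no limits or preferences (0:0:0 everywhere and 65536 maxima), a 1-vCPU flavor yields exactly one candidate and the chosen topology is sockets=1, cores=1, threads=1. A rough sketch of that enumerate-filter-sort idea, using plain tuples instead of VirtCPUTopology and only enough logic to reproduce the 1:1:1 result; not Nova's actual implementation.

    # Rough sketch of "Build topologies for N vcpu(s)" -> "Got K possible
    # topologies" -> "Sorted desired topologies"; illustrative only.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topos.append((sockets, cores, threads))
        return topos

    def desired_topologies(vcpus, prefer=(0, 0, 0)):
        # With no preference every candidate ranks equally; for 1 vCPU the only
        # candidate is (1, 1, 1), matching the log above.
        key = lambda t: tuple(-(p == v) for p, v in zip(prefer, t))
        return sorted(possible_topologies(vcpus), key=key)

    assert desired_topologies(1) == [(1, 1, 1)]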
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1438.054999] env[63241]: INFO nova.scheduler.client.report [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Deleted allocations for instance fe8eaeee-56b2-4974-a448-8f95848b3b3a [ 1438.063112] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ff01bf5-88dc-4070-8e9f-73365cf4227f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.072505] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584891} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.073322] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 27177719-5090-43de-9bca-6db6bebab7b4/27177719-5090-43de-9bca-6db6bebab7b4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1438.073538] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1438.074912] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-066776bc-935b-45b3-80a7-647406a0fe9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.077214] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Created folder: Project (90844013be3444e187bac99366b5045e) in parent group-v376927. [ 1438.077429] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Creating folder: Instances. Parent ref: group-v377018. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1438.077715] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68110199-eb58-447c-8ed5-94301a432eff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.084074] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1438.084074] env[63241]: value = "task-1820068" [ 1438.084074] env[63241]: _type = "Task" [ 1438.084074] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.088833] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Created folder: Instances in parent group-v377018. [ 1438.089369] env[63241]: DEBUG oslo.service.loopingcall [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1438.090320] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1438.090537] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d550be21-1f28-4af5-87ae-b4fddb92f4bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.106514] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820068, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.111538] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1438.111538] env[63241]: value = "task-1820070" [ 1438.111538] env[63241]: _type = "Task" [ 1438.111538] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.121086] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820070, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.163136] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Releasing lock "refresh_cache-e2758650-2762-49f6-a678-f55425a89994" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.163535] env[63241]: DEBUG nova.compute.manager [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Instance network_info: |[{"id": "bd711358-5fa1-44eb-afea-adfbb937a52a", "address": "fa:16:3e:a0:57:e0", "network": {"id": "224f885e-4de5-43f1-812c-eb20a195a19a", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-290379587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b914905bfcb64d31bae51b7236dc5b57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd711358-5f", "ovs_interfaceid": "bd711358-5fa1-44eb-afea-adfbb937a52a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1438.163894] env[63241]: DEBUG oslo_concurrency.lockutils [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] Acquired lock "refresh_cache-e2758650-2762-49f6-a678-f55425a89994" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.164145] env[63241]: DEBUG nova.network.neutron [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] [instance: e2758650-2762-49f6-a678-f55425a89994] Refreshing network info cache for port bd711358-5fa1-44eb-afea-adfbb937a52a {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1438.165504] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:57:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd711358-5fa1-44eb-afea-adfbb937a52a', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1438.174444] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 
tempest-ImagesNegativeTestJSON-207112561-project-member] Creating folder: Project (b914905bfcb64d31bae51b7236dc5b57). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1438.175658] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b3a11a1-f0d7-447c-80a8-36e0a4de4b70 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.186393] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Created folder: Project (b914905bfcb64d31bae51b7236dc5b57) in parent group-v376927. [ 1438.186648] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Creating folder: Instances. Parent ref: group-v377021. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1438.186897] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f0d04e6-31b5-4315-b03e-b8e7c05ae06f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.197926] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Created folder: Instances in parent group-v377021. [ 1438.198217] env[63241]: DEBUG oslo.service.loopingcall [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1438.198419] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2758650-2762-49f6-a678-f55425a89994] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1438.198639] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3eb7124-0465-4e6c-9523-03f60e0852ad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.218972] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1438.218972] env[63241]: value = "task-1820073" [ 1438.218972] env[63241]: _type = "Task" [ 1438.218972] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.227463] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820073, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.570888] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e7235d38-a8a1-480d-a454-599e250ff686 tempest-ServerDiagnosticsTest-768512356 tempest-ServerDiagnosticsTest-768512356-project-member] Lock "fe8eaeee-56b2-4974-a448-8f95848b3b3a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.009s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.602147] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820068, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068158} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.602147] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1438.604094] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53a75b4-73d6-4f02-9f67-db24d62efd94 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.636397] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 27177719-5090-43de-9bca-6db6bebab7b4/27177719-5090-43de-9bca-6db6bebab7b4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.640308] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8b44175-2509-4429-869d-fe9165d391a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.662661] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820070, 'name': CreateVM_Task, 'duration_secs': 0.280923} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.664019] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1438.664431] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1438.664431] env[63241]: value = "task-1820074" [ 1438.664431] env[63241]: _type = "Task" [ 1438.664431] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.664815] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.665069] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.665338] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1438.665666] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82e12e7b-09a8-4ebf-899c-98cf7fc4e8b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.678400] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820074, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.680292] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1438.680292] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523d8a0b-105f-bcc0-a861-85db39d21e61" [ 1438.680292] env[63241]: _type = "Task" [ 1438.680292] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.691823] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523d8a0b-105f-bcc0-a861-85db39d21e61, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.692319] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.692595] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1438.694092] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.694092] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.694092] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1438.694092] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-285c81fc-1401-4147-8f4d-f52ed337e49d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.703040] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1438.707027] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1438.707027] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00cce0de-197f-472c-9ae9-9682fe7ae0f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.710310] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1438.710310] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52fe7003-5920-ab77-88c7-f2585faae468" [ 1438.710310] env[63241]: _type = "Task" [ 1438.710310] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.721524] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fe7003-5920-ab77-88c7-f2585faae468, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.730021] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820073, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.073425] env[63241]: DEBUG nova.network.neutron [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] [instance: e2758650-2762-49f6-a678-f55425a89994] Updated VIF entry in instance network info cache for port bd711358-5fa1-44eb-afea-adfbb937a52a. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1439.073425] env[63241]: DEBUG nova.network.neutron [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] [instance: e2758650-2762-49f6-a678-f55425a89994] Updating instance_info_cache with network_info: [{"id": "bd711358-5fa1-44eb-afea-adfbb937a52a", "address": "fa:16:3e:a0:57:e0", "network": {"id": "224f885e-4de5-43f1-812c-eb20a195a19a", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-290379587-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b914905bfcb64d31bae51b7236dc5b57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd711358-5f", "ovs_interfaceid": "bd711358-5fa1-44eb-afea-adfbb937a52a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.180128] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820074, 'name': ReconfigVM_Task, 'duration_secs': 0.328256} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.180449] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 27177719-5090-43de-9bca-6db6bebab7b4/27177719-5090-43de-9bca-6db6bebab7b4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1439.181769] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a268377-bbe0-4cf4-9490-38641d947a28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.188143] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1439.188143] env[63241]: value = "task-1820075" [ 1439.188143] env[63241]: _type = "Task" [ 1439.188143] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.197963] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820075, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.222516] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fe7003-5920-ab77-88c7-f2585faae468, 'name': SearchDatastore_Task, 'duration_secs': 0.013997} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.229380] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bbe77ec-d1b7-4f48-ab0f-29e31adf8025 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.236966] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820073, 'name': CreateVM_Task, 'duration_secs': 0.521057} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.238393] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2758650-2762-49f6-a678-f55425a89994] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1439.238772] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1439.238772] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520afb93-c5c3-18c5-c07d-ad5d3157f7f4" [ 1439.238772] env[63241]: _type = "Task" [ 1439.238772] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.241675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.241867] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.242205] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1439.242696] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7b3cccc-190a-4988-b5f1-b34796a30378 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.250298] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: 
(returnval){ [ 1439.250298] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522bbc5b-29b9-5de2-9f5e-105d87e03cf8" [ 1439.250298] env[63241]: _type = "Task" [ 1439.250298] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.253873] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520afb93-c5c3-18c5-c07d-ad5d3157f7f4, 'name': SearchDatastore_Task, 'duration_secs': 0.013192} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.259788] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.260108] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5/c390d1ca-a199-4df6-847a-b543630a7bf5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1439.261917] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-582fe5dd-fbbd-4803-a207-5889cd4f8230 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.270213] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522bbc5b-29b9-5de2-9f5e-105d87e03cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.012992} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.271676] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.271934] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1439.272207] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.272380] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.272572] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1439.272916] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1439.272916] env[63241]: value = "task-1820076" [ 1439.272916] env[63241]: _type = "Task" [ 1439.272916] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.273131] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02c34254-ab98-42a8-8e24-a1578fdd7658 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.287068] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820076, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.297399] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1439.297591] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1439.298546] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dab224f8-db74-4723-90fc-2bed6595d607 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.306354] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: (returnval){ [ 1439.306354] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ce6bd9-a4be-bd4c-9165-4eb55a684681" [ 1439.306354] env[63241]: _type = "Task" [ 1439.306354] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.314481] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ce6bd9-a4be-bd4c-9165-4eb55a684681, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.563285] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27396639-7437-4172-9a69-3ad685378680 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.571368] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318f830a-5adc-4b40-821b-97fd9cfbf8c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.576388] env[63241]: DEBUG oslo_concurrency.lockutils [req-c8a04a47-829a-440f-aedd-8e8601aee1a4 req-d7a71c62-7d4a-4081-8ae8-92c3a60c8a9e service nova] Releasing lock "refresh_cache-e2758650-2762-49f6-a678-f55425a89994" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.603160] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd7192b-49c5-4bf4-97a6-c42aa6c97aa9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.611241] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40787cc-d0c9-4530-8908-b662be8b3b8e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.625516] env[63241]: DEBUG nova.compute.provider_tree [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.698495] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820075, 'name': Rename_Task, 'duration_secs': 0.152167} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.698816] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1439.699082] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02eebd36-2b8f-43d3-96b6-f6f8a8b09c7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.706206] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1439.706206] env[63241]: value = "task-1820077" [ 1439.706206] env[63241]: _type = "Task" [ 1439.706206] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.714544] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820077, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.788394] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820076, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.818778] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ce6bd9-a4be-bd4c-9165-4eb55a684681, 'name': SearchDatastore_Task, 'duration_secs': 0.026415} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.819587] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54eac7c9-2348-49a0-ac3c-ed34cb07d8f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.825507] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: (returnval){ [ 1439.825507] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b25b8c-13f9-7719-af38-e982789c00f3" [ 1439.825507] env[63241]: _type = "Task" [ 1439.825507] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.834462] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b25b8c-13f9-7719-af38-e982789c00f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.129114] env[63241]: DEBUG nova.scheduler.client.report [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1440.217734] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820077, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.287043] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820076, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.777574} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.287374] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5/c390d1ca-a199-4df6-847a-b543630a7bf5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1440.287635] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1440.287921] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bbdcab5c-521b-4c46-bca3-7bcd0a282133 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.295202] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1440.295202] env[63241]: value = "task-1820078" [ 1440.295202] env[63241]: _type = "Task" [ 1440.295202] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.306185] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820078, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.335269] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b25b8c-13f9-7719-af38-e982789c00f3, 'name': SearchDatastore_Task, 'duration_secs': 0.051724} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.335563] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.335817] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e2758650-2762-49f6-a678-f55425a89994/e2758650-2762-49f6-a678-f55425a89994.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1440.336078] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6338181b-8d45-4ad9-8a0f-af69f70515ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.342483] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: (returnval){ [ 1440.342483] env[63241]: value = "task-1820079" [ 1440.342483] env[63241]: _type = "Task" [ 1440.342483] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.347617] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52320396-29dd-06f8-786e-e16a5e4a33d7/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1440.348409] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7033db2a-60a2-49ef-ab65-bb1643ef2f84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.355419] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820079, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.356994] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52320396-29dd-06f8-786e-e16a5e4a33d7/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1440.357191] env[63241]: ERROR oslo_vmware.rw_handles [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52320396-29dd-06f8-786e-e16a5e4a33d7/disk-0.vmdk due to incomplete transfer. [ 1440.357441] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5039dcb1-47fa-4107-8dc0-63562cbe3fb2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.372313] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52320396-29dd-06f8-786e-e16a5e4a33d7/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1440.372542] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Uploaded image 3e6badee-21cf-44ac-a19a-274665d3cec2 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1440.374698] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1440.375095] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-12adfa83-1c1b-4857-b94b-d153fd6796ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.381973] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1440.381973] env[63241]: value = "task-1820080" [ 1440.381973] env[63241]: _type = "Task" [ 1440.381973] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.393644] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820080, 'name': Destroy_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.635878] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.605s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.636521] env[63241]: DEBUG nova.compute.manager [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1440.639733] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.017s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.641393] env[63241]: INFO nova.compute.claims [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1440.717253] env[63241]: DEBUG oslo_vmware.api [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820077, 'name': PowerOnVM_Task, 'duration_secs': 0.632757} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.717900] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1440.718133] env[63241]: INFO nova.compute.manager [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Took 8.23 seconds to spawn the instance on the hypervisor. 
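[editor's note] As an aside on the placement inventory reported just above ('VCPU': total 48, allocation_ratio 4.0; 'MEMORY_MB': total 196590, reserved 512; 'DISK_GB': total 400): the following is a minimal, illustrative Python snippet, not Nova or Placement code, that recomputes the schedulable capacity those figures imply, assuming the usual placement formula capacity = (total - reserved) * allocation_ratio.

    # Illustrative only: recompute schedulable capacity from the inventory
    # dict logged by the scheduler report client above, assuming
    #   capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity = {capacity:g}")

    # Prints: VCPU 192, MEMORY_MB 196078, DISK_GB 400 -- i.e. the headroom the
    # scheduler has against this provider (9a5e30eb-ceae-4224-aa66-dcbfa98ce24b).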
[ 1440.718314] env[63241]: DEBUG nova.compute.manager [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1440.719580] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b063341a-c1a9-44b9-af2d-b79169aaa350 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.807386] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086571} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.807761] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1440.808752] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5946e34-11b2-4ef4-b072-0bbbd58439af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.829385] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5/c390d1ca-a199-4df6-847a-b543630a7bf5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1440.829734] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0920b20-037e-45a0-bcce-b596cab6b03d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.854054] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820079, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.854671] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1440.854671] env[63241]: value = "task-1820081" [ 1440.854671] env[63241]: _type = "Task" [ 1440.854671] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.862202] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820081, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.890700] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820080, 'name': Destroy_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.055743] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquiring lock "eaed706d-b3db-46ed-8c70-08f80479afa4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.055985] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lock "eaed706d-b3db-46ed-8c70-08f80479afa4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.146402] env[63241]: DEBUG nova.compute.utils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1441.151023] env[63241]: DEBUG nova.compute.manager [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1441.151023] env[63241]: DEBUG nova.network.neutron [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1441.212940] env[63241]: DEBUG nova.policy [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e31706e8cd1a4bf790f23c31c71298a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b8ae8ca872a549a6918b0e060d4b4af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1441.239044] env[63241]: INFO nova.compute.manager [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Took 40.18 seconds to build instance. 
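[editor's note] The paired 'acquired ... :: waited Ns' and 'released ... :: held Ns' lines throughout this trace (e.g. "compute_resources" waited 22.017s, "27177719-5090-43de-9bca-6db6bebab7b4" held 65.949s) come from oslo.concurrency's lock instrumentation. The sketch below is a simplified stand-in using plain threading, not the oslo_concurrency.lockutils implementation, showing how such wait/held timings can be produced around a critical section.

    # Minimal sketch (not oslo.concurrency): measure wait and hold times for a
    # named in-process lock, mirroring the log lines above.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}  # simplified registry; lockutils also supports fair/external locks

    @contextmanager
    def timed_lock(name):
        lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_from
            lock.release()
            print(f'Lock "{name}" released :: held {held:.3f}s')

    # Usage, loosely mirroring the compute_resources claim seen above:
    with timed_lock("compute_resources"):
        time.sleep(0.05)  # stand-in for the resource-claim work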
[ 1441.355032] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820079, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520704} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.355833] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e2758650-2762-49f6-a678-f55425a89994/e2758650-2762-49f6-a678-f55425a89994.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1441.356069] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1441.359155] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0e654da-0583-4f81-b53c-3582063647aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.367591] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.369047] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: (returnval){ [ 1441.369047] env[63241]: value = "task-1820082" [ 1441.369047] env[63241]: _type = "Task" [ 1441.369047] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.377161] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820082, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.391451] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820080, 'name': Destroy_Task, 'duration_secs': 0.710645} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.391804] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Destroyed the VM [ 1441.391931] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1441.392188] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5319ddde-c3f2-42c9-913c-d9180ca30ca5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.398429] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1441.398429] env[63241]: value = "task-1820083" [ 1441.398429] env[63241]: _type = "Task" [ 1441.398429] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.405958] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820083, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.651392] env[63241]: DEBUG nova.compute.manager [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1441.709709] env[63241]: DEBUG nova.network.neutron [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Successfully created port: cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1441.741761] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2205f64-cc89-4e02-adb9-2e441390034e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "27177719-5090-43de-9bca-6db6bebab7b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.949s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.871403] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820081, 'name': ReconfigVM_Task, 'duration_secs': 0.971647} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.874243] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Reconfigured VM instance instance-0000001f to attach disk [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5/c390d1ca-a199-4df6-847a-b543630a7bf5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1441.874948] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b939972-6671-4b0d-8d00-fa68a2b82896 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.881458] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820082, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.213245} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.882667] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1441.883016] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1441.883016] env[63241]: value = "task-1820084" [ 1441.883016] env[63241]: _type = "Task" [ 1441.883016] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.883700] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618dd538-3979-4f3e-8560-7587585ad829 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.903952] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820084, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.912444] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] e2758650-2762-49f6-a678-f55425a89994/e2758650-2762-49f6-a678-f55425a89994.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1441.916016] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a3e7b09-b392-4a06-be58-dd42941aec1d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.938347] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820083, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.940047] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: (returnval){ [ 1441.940047] env[63241]: value = "task-1820085" [ 1441.940047] env[63241]: _type = "Task" [ 1441.940047] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.949845] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820085, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.145310] env[63241]: DEBUG nova.compute.manager [req-cea14572-c904-4c44-a5f8-4bfb9e5e2ac0 req-88cced29-e45c-46ce-9d11-af2956d4f901 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Received event network-changed-2e94bb05-0411-4916-b14d-3c2ebc9dfccd {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1442.145508] env[63241]: DEBUG nova.compute.manager [req-cea14572-c904-4c44-a5f8-4bfb9e5e2ac0 req-88cced29-e45c-46ce-9d11-af2956d4f901 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Refreshing instance network info cache due to event network-changed-2e94bb05-0411-4916-b14d-3c2ebc9dfccd. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1442.145730] env[63241]: DEBUG oslo_concurrency.lockutils [req-cea14572-c904-4c44-a5f8-4bfb9e5e2ac0 req-88cced29-e45c-46ce-9d11-af2956d4f901 service nova] Acquiring lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.146313] env[63241]: DEBUG oslo_concurrency.lockutils [req-cea14572-c904-4c44-a5f8-4bfb9e5e2ac0 req-88cced29-e45c-46ce-9d11-af2956d4f901 service nova] Acquired lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.148506] env[63241]: DEBUG nova.network.neutron [req-cea14572-c904-4c44-a5f8-4bfb9e5e2ac0 req-88cced29-e45c-46ce-9d11-af2956d4f901 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Refreshing network info cache for port 2e94bb05-0411-4916-b14d-3c2ebc9dfccd {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1442.249630] env[63241]: DEBUG nova.compute.manager [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1442.316701] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1976736b-fb9d-415c-ae50-bab44185810c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.326621] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97531261-1856-4f3e-ac7d-02850344b13a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.360439] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cd266d-050a-4c49-839e-15c81743572e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.368397] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73bd4ec-a3a7-4101-a8c8-5fffac7ba039 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.382739] env[63241]: DEBUG nova.compute.provider_tree [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1442.395686] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820084, 'name': Rename_Task, 'duration_secs': 0.27145} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.395686] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1442.395686] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8284e259-65dd-4bb5-99f0-2bcb32e0acdd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.402419] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1442.402419] env[63241]: value = "task-1820086" [ 1442.402419] env[63241]: _type = "Task" [ 1442.402419] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.412715] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820086, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.415834] env[63241]: DEBUG oslo_vmware.api [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820083, 'name': RemoveSnapshot_Task, 'duration_secs': 0.571769} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.416051] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1442.416306] env[63241]: INFO nova.compute.manager [None req-c1de615d-2036-4ec6-bae1-7636b8b7e87e tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Took 14.00 seconds to snapshot the instance on the hypervisor. [ 1442.450326] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820085, 'name': ReconfigVM_Task, 'duration_secs': 0.353887} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.450618] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Reconfigured VM instance instance-0000001e to attach disk [datastore1] e2758650-2762-49f6-a678-f55425a89994/e2758650-2762-49f6-a678-f55425a89994.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1442.451307] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87e13726-4e21-48b4-a46d-e0d04cfab30e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.458997] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: (returnval){ [ 1442.458997] env[63241]: value = "task-1820087" [ 1442.458997] env[63241]: _type = "Task" [ 1442.458997] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.471202] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820087, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.670740] env[63241]: DEBUG nova.compute.manager [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1442.698678] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1442.698924] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1442.699091] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1442.699336] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1442.699458] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1442.699606] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1442.699826] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1442.699989] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1442.700178] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1442.700354] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1442.700532] env[63241]: DEBUG nova.virt.hardware [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1442.701990] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01c2e99-da1e-40d9-bed4-288f3dc2844c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.710956] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9867b0-70d1-44a2-a8f2-6b8c81674bdf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.769817] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.885704] env[63241]: DEBUG nova.scheduler.client.report [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1442.917899] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820086, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.969141] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820087, 'name': Rename_Task, 'duration_secs': 0.148593} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.970129] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1442.970129] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22d0c979-d224-4793-8a20-02dabfe1d6a2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.972393] env[63241]: DEBUG nova.network.neutron [req-cea14572-c904-4c44-a5f8-4bfb9e5e2ac0 req-88cced29-e45c-46ce-9d11-af2956d4f901 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updated VIF entry in instance network info cache for port 2e94bb05-0411-4916-b14d-3c2ebc9dfccd. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1442.972739] env[63241]: DEBUG nova.network.neutron [req-cea14572-c904-4c44-a5f8-4bfb9e5e2ac0 req-88cced29-e45c-46ce-9d11-af2956d4f901 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updating instance_info_cache with network_info: [{"id": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "address": "fa:16:3e:fd:57:de", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e94bb05-04", "ovs_interfaceid": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.979693] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: (returnval){ [ 1442.979693] env[63241]: value = "task-1820088" [ 1442.979693] env[63241]: _type = "Task" [ 1442.979693] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.990176] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820088, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.391413] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.751s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.391804] env[63241]: DEBUG nova.compute.manager [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1443.394749] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.903s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.396238] env[63241]: INFO nova.compute.claims [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1443.412385] env[63241]: DEBUG oslo_vmware.api [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820086, 'name': PowerOnVM_Task, 'duration_secs': 0.762259} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.412614] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1443.413694] env[63241]: INFO nova.compute.manager [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Took 5.42 seconds to spawn the instance on the hypervisor. 
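The PowerOnVM_Task, Rename_Task and ReconfigVM_Task records above all follow the same oslo.vmware pattern: the driver invokes a vSphere method that returns a task object, and wait_for_task/_poll_task then re-reads that task until it reports success, producing the intermediate "progress is N%" DEBUG lines seen here. The snippet below is a minimal, self-contained sketch of that polling loop only; TaskInfo and fetch_task_info are hypothetical stand-ins for illustration, not oslo.vmware's real classes or signatures.

# Illustrative sketch of the task-polling pattern behind the
# "Task: {'id': task-18200xx, ...} progress is N%" records above.
# TaskInfo and fetch_task_info are hypothetical stand-ins; the real
# loop lives in oslo_vmware.api and reads vSphere task state over SOAP.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str      # 'queued' | 'running' | 'success' | 'error'
    progress: int   # 0-100
    error: str = ""

def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError("Task %s failed: %s" % (task_id, info.error))
        # Corresponds to the DEBUG "... progress is N%" records in the log.
        print("Task %s progress is %d%%" % (task_id, info.progress))
        time.sleep(poll_interval)
    raise TimeoutError("Task %s did not complete within %ss" % (task_id, timeout))

In the captured run, for example, task-1820086 (PowerOnVM_Task) is reported at 0%, then 66%, and completes with duration_secs 0.762259 before the instance is marked powered on.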
[ 1443.413694] env[63241]: DEBUG nova.compute.manager [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1443.413869] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314f1ab2-ce06-46f1-b7ff-6b8ad3b26cb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.475070] env[63241]: DEBUG oslo_concurrency.lockutils [req-cea14572-c904-4c44-a5f8-4bfb9e5e2ac0 req-88cced29-e45c-46ce-9d11-af2956d4f901 service nova] Releasing lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.491249] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820088, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.653101] env[63241]: DEBUG nova.compute.manager [req-af05c9c4-7aa9-4f17-ba3f-8cb342f39fc5 req-0cef20bb-5c9e-4a46-ad1e-60eeb67c7152 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Received event network-vif-plugged-cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1443.653630] env[63241]: DEBUG oslo_concurrency.lockutils [req-af05c9c4-7aa9-4f17-ba3f-8cb342f39fc5 req-0cef20bb-5c9e-4a46-ad1e-60eeb67c7152 service nova] Acquiring lock "2b1805b3-2e03-410f-8222-64b8542d4a43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.653868] env[63241]: DEBUG oslo_concurrency.lockutils [req-af05c9c4-7aa9-4f17-ba3f-8cb342f39fc5 req-0cef20bb-5c9e-4a46-ad1e-60eeb67c7152 service nova] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.654162] env[63241]: DEBUG oslo_concurrency.lockutils [req-af05c9c4-7aa9-4f17-ba3f-8cb342f39fc5 req-0cef20bb-5c9e-4a46-ad1e-60eeb67c7152 service nova] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.654449] env[63241]: DEBUG nova.compute.manager [req-af05c9c4-7aa9-4f17-ba3f-8cb342f39fc5 req-0cef20bb-5c9e-4a46-ad1e-60eeb67c7152 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] No waiting events found dispatching network-vif-plugged-cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1443.654680] env[63241]: WARNING nova.compute.manager [req-af05c9c4-7aa9-4f17-ba3f-8cb342f39fc5 req-0cef20bb-5c9e-4a46-ad1e-60eeb67c7152 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Received unexpected event network-vif-plugged-cae6ea4d-1012-49d8-b413-e047b6b16de9 for instance with 
vm_state building and task_state spawning. [ 1443.790830] env[63241]: DEBUG nova.network.neutron [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Successfully updated port: cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1443.900798] env[63241]: DEBUG nova.compute.utils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1443.904567] env[63241]: DEBUG nova.compute.manager [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1443.904567] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1443.932840] env[63241]: INFO nova.compute.manager [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Took 32.90 seconds to build instance. [ 1443.967190] env[63241]: DEBUG nova.policy [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74bb38a9180e49efa8e14396d5d04d8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b874b5f558e48e9a83b27e69d262106', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1443.990488] env[63241]: DEBUG oslo_vmware.api [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820088, 'name': PowerOnVM_Task, 'duration_secs': 0.769791} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.990816] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1443.991051] env[63241]: INFO nova.compute.manager [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Took 8.82 seconds to spawn the instance on the hypervisor. 
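Throughout this section, each cross-thread hand-off is guarded by a named in-process lock: the per-instance "refresh_cache-<uuid>" lock around the network info cache, the "<uuid>-events" lock around pop_instance_event, and the "compute_resources" lock around the resource tracker's instance_claim. The "Acquiring"/"acquired"/"released" DEBUG lines are emitted from oslo.concurrency's lockutils module. The sketch below shows the two usual ways such locks are taken, assuming the public lockutils.synchronized decorator and lockutils.lock context manager; the function bodies are placeholders and the lock names are simply the ones appearing in this log.

# Minimal sketch of the named-lock pattern behind the
# 'Acquiring lock "refresh_cache-..."' / 'Lock "compute_resources"' lines.
# Assumes oslo.concurrency's public helpers; bodies are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim(instance):
    # Only one thread at a time may mutate tracked resource usage; the
    # log's "waited N.NNNs" figures measure time spent blocked here.
    pass

def refresh_instance_cache(instance_uuid):
    # Context-manager form, useful when only part of a function needs
    # the per-instance lock, e.g. "refresh_cache-2b1805b3-...".
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # rebuild the instance_info_cache while holding the lock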
[ 1443.991242] env[63241]: DEBUG nova.compute.manager [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1443.992065] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce6a610-8688-49fd-9182-229d94634974 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.244659] env[63241]: DEBUG nova.compute.manager [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1444.245688] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef80aa1-517f-4219-b810-0f6cedb5a495 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.296721] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.296721] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquired lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.296721] env[63241]: DEBUG nova.network.neutron [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1444.361454] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Successfully created port: 76a97e3a-997e-45eb-9aee-2696857c9aaf {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1444.405475] env[63241]: DEBUG nova.compute.manager [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1444.440197] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1b8f9eb6-668e-4ac6-9531-084b2ab29795 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "c390d1ca-a199-4df6-847a-b543630a7bf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.225s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.514185] env[63241]: INFO nova.compute.manager [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Took 40.95 seconds to build instance. [ 1444.555247] env[63241]: INFO nova.compute.manager [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Rebuilding instance [ 1444.607312] env[63241]: DEBUG nova.compute.manager [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1444.608099] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e2c9f6-7043-47b8-bc23-8f907ccbc65c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.705930] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Successfully created port: d73b4859-4b46-4a03-b251-fc28eaadbec7 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1444.758026] env[63241]: INFO nova.compute.manager [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] instance snapshotting [ 1444.763203] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e87b4df-c20b-49bb-ad54-6d09359a2952 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.787416] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f448258-1869-4a9c-ba3f-c0591cbcdf41 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.847637] env[63241]: DEBUG nova.network.neutron [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1444.945228] env[63241]: DEBUG nova.compute.manager [None req-fbf492b3-3e71-4116-a955-56869488bc24 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f72a1045-1404-4a4c-82da-b452ea9429d3] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1444.979938] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c47004d-0ca2-4b6a-b588-0dfa693d7c8e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.992123] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b935db-df47-4256-9d3a-e3d2657cc71a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.030479] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c43d59d-30da-47cd-9f6c-cf023e0e4f8b tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "e2758650-2762-49f6-a678-f55425a89994" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.425s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.034902] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c470a85-bb54-4bb6-ab10-bc46414ede62 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.044443] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076d3797-da56-4357-b641-8c0e43a3ede1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.058409] env[63241]: DEBUG nova.compute.provider_tree [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1445.121037] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1445.121330] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-117e026f-c658-4071-8ac3-90aa320c2186 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.127974] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1445.127974] env[63241]: value = "task-1820089" [ 1445.127974] env[63241]: _type = "Task" [ 1445.127974] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.138422] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820089, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.235692] env[63241]: DEBUG nova.network.neutron [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updating instance_info_cache with network_info: [{"id": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "address": "fa:16:3e:74:66:bb", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcae6ea4d-10", "ovs_interfaceid": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.299880] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1445.300225] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6bdca2f2-865c-4a1b-95fa-ededdf164378 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.309297] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1445.309297] env[63241]: value = "task-1820090" [ 1445.309297] env[63241]: _type = "Task" [ 1445.309297] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.315715] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Successfully created port: 4375bdb7-133a-4c01-8ad3-b93007c0de99 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1445.321590] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820090, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.357676] env[63241]: DEBUG oslo_concurrency.lockutils [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquiring lock "e2758650-2762-49f6-a678-f55425a89994" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.359910] env[63241]: DEBUG oslo_concurrency.lockutils [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "e2758650-2762-49f6-a678-f55425a89994" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.359910] env[63241]: DEBUG oslo_concurrency.lockutils [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquiring lock "e2758650-2762-49f6-a678-f55425a89994-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.360144] env[63241]: DEBUG oslo_concurrency.lockutils [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "e2758650-2762-49f6-a678-f55425a89994-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.360144] env[63241]: DEBUG oslo_concurrency.lockutils [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "e2758650-2762-49f6-a678-f55425a89994-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.362260] env[63241]: INFO nova.compute.manager [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Terminating instance [ 1445.364073] env[63241]: DEBUG nova.compute.manager [None req-375e597e-c434-4c3c-84e0-c9a411424a21 
tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1445.364288] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1445.365684] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2547f240-f40d-41a1-ad9a-02c6a0053530 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.372924] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1445.373137] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-862f1d82-c521-4258-b157-303e8df528c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.379628] env[63241]: DEBUG oslo_vmware.api [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: (returnval){ [ 1445.379628] env[63241]: value = "task-1820091" [ 1445.379628] env[63241]: _type = "Task" [ 1445.379628] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.387738] env[63241]: DEBUG oslo_vmware.api [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820091, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.418448] env[63241]: DEBUG nova.compute.manager [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1445.446839] env[63241]: DEBUG nova.compute.manager [None req-fbf492b3-3e71-4116-a955-56869488bc24 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f72a1045-1404-4a4c-82da-b452ea9429d3] Instance disappeared before build. 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2440}} [ 1445.456774] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1445.457361] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1445.457624] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1445.457842] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1445.457993] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1445.458234] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1445.458371] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1445.458639] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1445.458718] env[63241]: DEBUG nova.virt.hardware [None 
req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1445.458897] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1445.459083] env[63241]: DEBUG nova.virt.hardware [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1445.459960] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee9f8a6-ad80-4936-a182-e53373c08550 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.472173] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e3c678-bff1-4ca9-bf84-d2d039e9b224 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.538244] env[63241]: DEBUG nova.compute.manager [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1445.560941] env[63241]: DEBUG nova.scheduler.client.report [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1445.638426] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820089, 'name': PowerOffVM_Task, 'duration_secs': 0.223118} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.638715] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1445.638943] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1445.639746] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7fc37e-f58a-473e-bf15-c87b9662961d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.646136] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1445.646345] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf0de6d3-5c41-4ce8-97a9-f22e82c65749 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.671156] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1445.671441] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1445.671683] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Deleting the datastore file [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1445.671977] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b393ec93-62ff-46df-9f7d-97aa92c218b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.680904] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1445.680904] env[63241]: value = "task-1820093" [ 1445.680904] env[63241]: _type = "Task" [ 1445.680904] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.685707] env[63241]: DEBUG nova.compute.manager [req-f097ac29-8ba5-460b-b555-4beb92e4a5bf req-0b847067-133e-454e-9860-b7e416def894 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Received event network-changed-cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1445.686835] env[63241]: DEBUG nova.compute.manager [req-f097ac29-8ba5-460b-b555-4beb92e4a5bf req-0b847067-133e-454e-9860-b7e416def894 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Refreshing instance network info cache due to event network-changed-cae6ea4d-1012-49d8-b413-e047b6b16de9. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1445.686835] env[63241]: DEBUG oslo_concurrency.lockutils [req-f097ac29-8ba5-460b-b555-4beb92e4a5bf req-0b847067-133e-454e-9860-b7e416def894 service nova] Acquiring lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.691614] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820093, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.738060] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Releasing lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.738496] env[63241]: DEBUG nova.compute.manager [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance network_info: |[{"id": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "address": "fa:16:3e:74:66:bb", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcae6ea4d-10", "ovs_interfaceid": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1445.738896] env[63241]: DEBUG 
oslo_concurrency.lockutils [req-f097ac29-8ba5-460b-b555-4beb92e4a5bf req-0b847067-133e-454e-9860-b7e416def894 service nova] Acquired lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.739179] env[63241]: DEBUG nova.network.neutron [req-f097ac29-8ba5-460b-b555-4beb92e4a5bf req-0b847067-133e-454e-9860-b7e416def894 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Refreshing network info cache for port cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1445.740822] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:66:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3d7e184-c87f-47a5-8d0d-9fa20e07e669', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cae6ea4d-1012-49d8-b413-e047b6b16de9', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1445.749175] env[63241]: DEBUG oslo.service.loopingcall [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1445.752296] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1445.752799] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72516dd9-c57f-4bf6-9385-6a368d923547 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.774248] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.774248] env[63241]: value = "task-1820094" [ 1445.774248] env[63241]: _type = "Task" [ 1445.774248] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.782854] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820094, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.818018] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820090, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.889999] env[63241]: DEBUG oslo_vmware.api [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820091, 'name': PowerOffVM_Task, 'duration_secs': 0.184666} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.892307] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1445.892479] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1445.892719] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a70fa32f-d300-4f7a-a499-b8ce3867ee8e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.964569] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fbf492b3-3e71-4116-a955-56869488bc24 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "f72a1045-1404-4a4c-82da-b452ea9429d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.903s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.026571] env[63241]: DEBUG nova.network.neutron [req-f097ac29-8ba5-460b-b555-4beb92e4a5bf req-0b847067-133e-454e-9860-b7e416def894 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updated VIF entry in instance network info cache for port cae6ea4d-1012-49d8-b413-e047b6b16de9. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1446.027044] env[63241]: DEBUG nova.network.neutron [req-f097ac29-8ba5-460b-b555-4beb92e4a5bf req-0b847067-133e-454e-9860-b7e416def894 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updating instance_info_cache with network_info: [{"id": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "address": "fa:16:3e:74:66:bb", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcae6ea4d-10", "ovs_interfaceid": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.058776] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.065618] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.066116] env[63241]: DEBUG nova.compute.manager [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1446.068439] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 23.531s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.190358] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820093, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095334} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.190618] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1446.190801] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1446.190977] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1446.284558] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820094, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.320074] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820090, 'name': CreateSnapshot_Task, 'duration_secs': 0.554407} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.320378] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1446.321138] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6086ca03-225a-4f8b-b0d2-030e8fe838e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.469203] env[63241]: DEBUG nova.compute.manager [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1446.530812] env[63241]: DEBUG oslo_concurrency.lockutils [req-f097ac29-8ba5-460b-b555-4beb92e4a5bf req-0b847067-133e-454e-9860-b7e416def894 service nova] Releasing lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.573346] env[63241]: DEBUG nova.compute.utils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1446.584471] env[63241]: DEBUG nova.compute.manager [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1446.584697] env[63241]: DEBUG nova.network.neutron [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1446.645026] env[63241]: DEBUG nova.policy [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efae520b7bdf459ab1bdc0a9692026bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b1a99e9ac8f4fc0bbd763a9d91321af', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1446.784787] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820094, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.839236] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1446.839584] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b079c85c-19ac-4c0f-8b3c-0cb36e0af03d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.848086] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1446.848086] env[63241]: value = "task-1820096" [ 1446.848086] env[63241]: _type = "Task" [ 1446.848086] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.856616] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820096, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.005534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.027648] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1447.028068] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1447.028380] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Deleting the datastore file [datastore1] e2758650-2762-49f6-a678-f55425a89994 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1447.028677] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-497df5d7-1ab8-43ae-8907-31227cf0bd8f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.036067] env[63241]: DEBUG oslo_vmware.api [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for the task: (returnval){ [ 1447.036067] env[63241]: value = "task-1820097" [ 1447.036067] env[63241]: _type = "Task" [ 1447.036067] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.049452] env[63241]: DEBUG oslo_vmware.api [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820097, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.085417] env[63241]: DEBUG nova.compute.manager [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1447.127500] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 780f3eee-f6c7-4054-8e6e-a370f74dc405 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.127668] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance a1a8342a-b00e-42c1-8c01-a95659a78caf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.127822] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0e5447fd-a04f-4bc2-b329-e015883773b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.127960] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance fbbb7682-873d-4bb0-8d39-4aec3566b0af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.128093] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance a1f24cfe-88f0-4e73-9ade-2dcf907848a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.128215] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 44508cc6-c576-4c30-8559-75118ceba02a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.128356] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance a88ba00d-6644-4ecc-8603-a7d79ce8a4b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.128457] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 3c51d4dc-5a2c-4483-9aa5-8bab532971d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.128569] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0c72c98b-57f0-44e5-9159-490b27eac3a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.128680] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.128821] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 94a604da-ad3d-415a-aa92-d648e3da803d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.129000] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 5fce9350-6d45-4bfb-a74b-f5b384ecb16c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.129255] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 41182989-2537-42f0-8c37-792b8b2c5206 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1447.129896] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0115b03b-c828-4e8b-a4d2-c98f8ca69c66 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.130102] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance d60c3a22-19fb-4826-be88-d0307810a079 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1447.130240] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance ac35fa03-aeca-4e18-84ab-cb80bb4cabfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.130362] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 5060e745-08d0-429e-8780-bfdad7a29f30 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.130488] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 40217405-dcba-48cf-9d92-4122390d9fa8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1447.130615] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance f1c19f17-ce7c-481a-99fd-d0bb20f1520b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1447.130730] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 27177719-5090-43de-9bca-6db6bebab7b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.130843] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e2758650-2762-49f6-a678-f55425a89994 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.130952] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance c390d1ca-a199-4df6-847a-b543630a7bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.131078] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 2b1805b3-2e03-410f-8222-64b8542d4a43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.131206] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 78894fda-8309-430a-ab38-ce1a415d83d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.131333] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 343a7e90-5e55-4125-8475-44050f267987 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1447.142276] env[63241]: DEBUG nova.network.neutron [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Successfully created port: c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1447.235904] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1447.236178] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1447.236353] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1447.236543] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1447.237237] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1447.237237] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1447.237237] 
env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1447.237409] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1447.238033] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1447.238033] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1447.238033] env[63241]: DEBUG nova.virt.hardware [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1447.238801] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cf39dc-a536-4a2b-a0de-2fe284dadd99 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.249473] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c296b220-0802-4df6-acb4-623a623403e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.264912] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1447.271482] env[63241]: DEBUG oslo.service.loopingcall [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1447.271809] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1447.272058] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6a71c36-e6c5-4538-9c1c-9e0e0ffbff28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.284816] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Successfully updated port: 76a97e3a-997e-45eb-9aee-2696857c9aaf {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1447.298054] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820094, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.298269] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1447.298269] env[63241]: value = "task-1820098" [ 1447.298269] env[63241]: _type = "Task" [ 1447.298269] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.310597] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820098, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.358505] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820096, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.547884] env[63241]: DEBUG oslo_vmware.api [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Task: {'id': task-1820097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146088} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.547884] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1447.547884] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1447.548142] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1447.548293] env[63241]: INFO nova.compute.manager [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] [instance: e2758650-2762-49f6-a678-f55425a89994] Took 2.18 seconds to destroy the instance on the hypervisor. [ 1447.548780] env[63241]: DEBUG oslo.service.loopingcall [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1447.548891] env[63241]: DEBUG nova.compute.manager [-] [instance: e2758650-2762-49f6-a678-f55425a89994] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1447.548973] env[63241]: DEBUG nova.network.neutron [-] [instance: e2758650-2762-49f6-a678-f55425a89994] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1447.636638] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance c1c85cc0-53f1-4920-8f3e-6dd69414fa85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1447.797508] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820094, 'name': CreateVM_Task, 'duration_secs': 1.595539} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.797681] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1447.798386] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.798558] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.798927] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1447.799139] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e7134ae-2c7e-48d6-99a2-25b4372766fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.809203] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820098, 'name': CreateVM_Task, 'duration_secs': 0.263859} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.812023] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1447.812166] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1447.812166] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520b955e-571a-3d96-b8d6-cb0fbd71c456" [ 1447.812166] env[63241]: _type = "Task" [ 1447.812166] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.812429] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.824366] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520b955e-571a-3d96-b8d6-cb0fbd71c456, 'name': SearchDatastore_Task, 'duration_secs': 0.009513} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.824366] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.824366] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1447.824366] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.824490] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.824490] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1447.824490] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.824490] env[63241]: DEBUG oslo_concurrency.lockutils 
[None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1447.824490] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29d6cf3c-4fae-4015-b657-76777311d2bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.825136] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-732adaac-68dc-4603-a96d-3a6cced54548 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.833439] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1447.833439] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a77617-2f9a-b64e-0baa-e284c6b086e5" [ 1447.833439] env[63241]: _type = "Task" [ 1447.833439] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.834538] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1447.834706] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1447.835690] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-367d5dec-e1fb-49d1-9cde-1fa6c2092757 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.841551] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a77617-2f9a-b64e-0baa-e284c6b086e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.844670] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1447.844670] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52363f5e-c128-3274-8264-09800e4db4ad" [ 1447.844670] env[63241]: _type = "Task" [ 1447.844670] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.852332] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52363f5e-c128-3274-8264-09800e4db4ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.861241] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820096, 'name': CloneVM_Task, 'duration_secs': 1.010017} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.861241] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Created linked-clone VM from snapshot [ 1447.861241] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf79179-a7ee-4795-955c-f2c9abc6f2be {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.868193] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Uploading image dcb38905-84bc-4f0d-9bbd-6e6343d3cd85 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1447.893580] env[63241]: DEBUG oslo_vmware.rw_handles [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1447.893580] env[63241]: value = "vm-377026" [ 1447.893580] env[63241]: _type = "VirtualMachine" [ 1447.893580] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1447.893915] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5f92f559-d91a-48fa-a4eb-081fd4d35520 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.900432] env[63241]: DEBUG oslo_vmware.rw_handles [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lease: (returnval){ [ 1447.900432] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521629d5-f190-200e-f9bf-17cce54ee4f3" [ 1447.900432] env[63241]: _type = "HttpNfcLease" [ 1447.900432] env[63241]: } obtained for exporting VM: (result){ [ 1447.900432] env[63241]: value = "vm-377026" [ 1447.900432] env[63241]: _type = "VirtualMachine" [ 1447.900432] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1447.900753] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the lease: (returnval){ [ 1447.900753] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521629d5-f190-200e-f9bf-17cce54ee4f3" [ 1447.900753] env[63241]: _type = "HttpNfcLease" [ 1447.900753] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1447.914282] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1447.914282] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521629d5-f190-200e-f9bf-17cce54ee4f3" [ 1447.914282] env[63241]: _type = "HttpNfcLease" [ 1447.914282] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1447.914282] env[63241]: DEBUG oslo_vmware.rw_handles [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1447.914282] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521629d5-f190-200e-f9bf-17cce54ee4f3" [ 1447.914282] env[63241]: _type = "HttpNfcLease" [ 1447.914282] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1447.914664] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2b8cec-33b4-4d82-bd4d-14e71ac9dedc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.928024] env[63241]: DEBUG oslo_vmware.rw_handles [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255f928-1323-499f-fa76-4760bea02b11/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1447.928024] env[63241]: DEBUG oslo_vmware.rw_handles [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255f928-1323-499f-fa76-4760bea02b11/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1448.020062] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b7f4fe52-d6cb-412c-8b50-7076556f4390 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.101287] env[63241]: DEBUG nova.compute.manager [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1448.107293] env[63241]: DEBUG nova.compute.manager [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received event network-vif-plugged-76a97e3a-997e-45eb-9aee-2696857c9aaf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.107293] env[63241]: DEBUG oslo_concurrency.lockutils [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] Acquiring lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.107293] env[63241]: DEBUG oslo_concurrency.lockutils [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] Lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.107293] env[63241]: DEBUG oslo_concurrency.lockutils [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] Lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.107293] env[63241]: DEBUG nova.compute.manager [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] No waiting events found dispatching network-vif-plugged-76a97e3a-997e-45eb-9aee-2696857c9aaf {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1448.107531] env[63241]: WARNING nova.compute.manager [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received unexpected event network-vif-plugged-76a97e3a-997e-45eb-9aee-2696857c9aaf for instance with vm_state building and task_state spawning. [ 1448.107531] env[63241]: DEBUG nova.compute.manager [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received event network-changed-76a97e3a-997e-45eb-9aee-2696857c9aaf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.107531] env[63241]: DEBUG nova.compute.manager [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Refreshing instance network info cache due to event network-changed-76a97e3a-997e-45eb-9aee-2696857c9aaf. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1448.107531] env[63241]: DEBUG oslo_concurrency.lockutils [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] Acquiring lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.107531] env[63241]: DEBUG oslo_concurrency.lockutils [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] Acquired lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.107764] env[63241]: DEBUG nova.network.neutron [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Refreshing network info cache for port 76a97e3a-997e-45eb-9aee-2696857c9aaf {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1448.136953] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1448.137270] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1448.137417] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1448.137617] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1448.137755] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1448.137897] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 
tempest-SecurityGroupsTestJSON-1725646800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1448.138148] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1448.138302] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1448.138492] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1448.138687] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1448.138882] env[63241]: DEBUG nova.virt.hardware [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1448.140026] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 81854e13-e0c1-43a9-8529-678d56d57bbf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1448.142743] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8b3fc5-aecf-4ef1-8910-33e28bf06c08 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.152117] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec5e660-7039-438a-9b5b-d5ac3df2be96 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.346371] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a77617-2f9a-b64e-0baa-e284c6b086e5, 'name': SearchDatastore_Task, 'duration_secs': 0.016454} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.351508] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.351869] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1448.352223] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.360837] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52363f5e-c128-3274-8264-09800e4db4ad, 'name': SearchDatastore_Task, 'duration_secs': 0.009098} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.362079] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f76c7265-ef52-4c4c-b376-3795df31b885 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.369467] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1448.369467] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5274302d-ee41-59a5-4f23-c217a919c250" [ 1448.369467] env[63241]: _type = "Task" [ 1448.369467] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.382596] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5274302d-ee41-59a5-4f23-c217a919c250, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.613300] env[63241]: DEBUG nova.network.neutron [-] [instance: e2758650-2762-49f6-a678-f55425a89994] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.647271] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance bef91c1c-a418-4464-ae7b-883ffb7e9695 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1448.649751] env[63241]: DEBUG nova.network.neutron [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1448.737806] env[63241]: DEBUG nova.network.neutron [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.880934] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5274302d-ee41-59a5-4f23-c217a919c250, 'name': SearchDatastore_Task, 'duration_secs': 0.014036} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.881248] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.881621] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 2b1805b3-2e03-410f-8222-64b8542d4a43/2b1805b3-2e03-410f-8222-64b8542d4a43.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1448.881928] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.882186] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1448.882458] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0e7de6e-8d71-49eb-b3a5-431fc3e84117 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.884964] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ec343cc-41d1-4337-ae33-7ff0f583f356 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.892863] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1448.892863] env[63241]: value = "task-1820100" [ 1448.892863] env[63241]: _type = "Task" [ 1448.892863] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.896819] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1448.897154] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1448.898427] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e81a3c20-879b-48f0-a6d9-4210e3d40e07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.904056] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.907387] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1448.907387] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52928ed4-b876-30d0-c2ac-ba6eb1723bd8" [ 1448.907387] env[63241]: _type = "Task" [ 1448.907387] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.915714] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52928ed4-b876-30d0-c2ac-ba6eb1723bd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.116686] env[63241]: INFO nova.compute.manager [-] [instance: e2758650-2762-49f6-a678-f55425a89994] Took 1.57 seconds to deallocate network for instance. [ 1449.153733] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance efbe39fa-d581-41ac-b51c-9c94c9839d7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1449.240576] env[63241]: DEBUG oslo_concurrency.lockutils [req-67ae7145-1de3-4920-9826-9f00163577f9 req-cbcdc07e-6981-4bdb-a034-b5f89f64517d service nova] Releasing lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.325115] env[63241]: DEBUG nova.network.neutron [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Successfully updated port: c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1449.391831] env[63241]: DEBUG nova.compute.manager [req-5406d9ac-cca2-4c60-9155-17e5b87480f0 req-e16d9361-c1ac-4a06-b6ce-db794fd49258 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received event network-vif-plugged-d73b4859-4b46-4a03-b251-fc28eaadbec7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1449.392179] env[63241]: DEBUG oslo_concurrency.lockutils [req-5406d9ac-cca2-4c60-9155-17e5b87480f0 req-e16d9361-c1ac-4a06-b6ce-db794fd49258 service nova] Acquiring lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.392513] env[63241]: DEBUG oslo_concurrency.lockutils [req-5406d9ac-cca2-4c60-9155-17e5b87480f0 req-e16d9361-c1ac-4a06-b6ce-db794fd49258 service nova] Lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.392797] env[63241]: DEBUG oslo_concurrency.lockutils [req-5406d9ac-cca2-4c60-9155-17e5b87480f0 req-e16d9361-c1ac-4a06-b6ce-db794fd49258 service nova] Lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.393111] env[63241]: DEBUG nova.compute.manager [req-5406d9ac-cca2-4c60-9155-17e5b87480f0 req-e16d9361-c1ac-4a06-b6ce-db794fd49258 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] No waiting events found dispatching network-vif-plugged-d73b4859-4b46-4a03-b251-fc28eaadbec7 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1449.393384] env[63241]: WARNING nova.compute.manager [req-5406d9ac-cca2-4c60-9155-17e5b87480f0 req-e16d9361-c1ac-4a06-b6ce-db794fd49258 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received unexpected event network-vif-plugged-d73b4859-4b46-4a03-b251-fc28eaadbec7 for instance with vm_state building and task_state spawning. 
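The entries above trace the image-cache path for a build: the worker takes a semaphore named after the cached VMDK under devstack-image-cache_base, confirms the cached image with SearchDatastore_Task, copies it into the instance directory with CopyVirtualDisk_Task, and then extends the root disk. A minimal, hedged sketch of that serialize-then-copy shape is below; it is not the nova.virt.vmwareapi code, the datastore helpers are hypothetical stubs standing in for the VIM task calls, and a plain in-process lockutils lock is used where the log shows an external semaphore.

    # Hedged sketch of the cache-then-copy flow recorded above (assumptions:
    # datastore_has/copy_virtual_disk/extend_virtual_disk are hypothetical stubs,
    # and lockutils.lock() here is an in-process lock, whereas the log shows an
    # external semaphore keyed on the same cached-image path).
    from oslo_concurrency import lockutils

    def datastore_has(path):                      # hypothetical stub
        return True

    def copy_virtual_disk(src, dst):              # hypothetical stub
        print("copy %s -> %s" % (src, dst))

    def extend_virtual_disk(path, size_kb):       # hypothetical stub
        print("extend %s to %s KB" % (path, size_kb))

    def fetch_from_image_cache(image_id, instance_uuid):
        cache_vmdk = ("[datastore1] devstack-image-cache_base/"
                      "%s/%s.vmdk" % (image_id, image_id))
        target_vmdk = "[datastore1] %s/%s.vmdk" % (instance_uuid, instance_uuid)
        # The "Acquired external semaphore ..." entries correspond to a lock
        # keyed on the cached image path, so concurrent builds of the same
        # image serialize on the datastore copy.
        with lockutils.lock(cache_vmdk):
            if datastore_has(cache_vmdk):
                copy_virtual_disk(cache_vmdk, target_vmdk)
        # The root disk is then grown to the flavor's 1 GiB root_gb, matching
        # the "Extending root virtual disk to 1048576" entries.
        extend_virtual_disk(target_vmdk, size_kb=1048576)

    fetch_from_image_cache("e128f8d9-813d-4846-9a6e-b4c4717cd5b4",
                           "2b1805b3-2e03-410f-8222-64b8542d4a43")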
[ 1449.394731] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Successfully updated port: d73b4859-4b46-4a03-b251-fc28eaadbec7 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1449.407412] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509456} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.408140] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 2b1805b3-2e03-410f-8222-64b8542d4a43/2b1805b3-2e03-410f-8222-64b8542d4a43.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1449.408140] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1449.411999] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80511343-d27e-4caa-a058-671e594b4bd4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.420261] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52928ed4-b876-30d0-c2ac-ba6eb1723bd8, 'name': SearchDatastore_Task, 'duration_secs': 0.008928} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.422178] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1449.422178] env[63241]: value = "task-1820101" [ 1449.422178] env[63241]: _type = "Task" [ 1449.422178] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.422448] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feaea750-7627-406f-9709-9d5f709950a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.430472] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1449.430472] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a5e625-5a90-5ea1-f059-e1195cfa44df" [ 1449.430472] env[63241]: _type = "Task" [ 1449.430472] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.433755] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820101, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.442741] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a5e625-5a90-5ea1-f059-e1195cfa44df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.627226] env[63241]: DEBUG oslo_concurrency.lockutils [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.658393] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 72a11582-1fad-428a-bde1-e9d0b05731cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1449.828772] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.828962] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquired lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.829258] env[63241]: DEBUG nova.network.neutron [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1449.938206] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820101, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066556} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.943281] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1449.944545] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6eb6489-c118-418d-b9c1-f17d6ec23850 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.956739] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a5e625-5a90-5ea1-f059-e1195cfa44df, 'name': SearchDatastore_Task, 'duration_secs': 0.010307} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.972247] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.972660] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5/c390d1ca-a199-4df6-847a-b543630a7bf5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1449.988750] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 2b1805b3-2e03-410f-8222-64b8542d4a43/2b1805b3-2e03-410f-8222-64b8542d4a43.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1449.989226] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20b76ff2-c935-448e-b47d-d87e5c01cd9a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.992539] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96ba9f37-51f1-4dc5-a849-24b012ade347 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.024095] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1450.024095] env[63241]: value = "task-1820102" [ 1450.024095] env[63241]: _type = "Task" [ 1450.024095] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.026223] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1450.026223] env[63241]: value = "task-1820103" [ 1450.026223] env[63241]: _type = "Task" [ 1450.026223] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.042982] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820102, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.048760] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820103, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.150670] env[63241]: DEBUG nova.compute.manager [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] [instance: e2758650-2762-49f6-a678-f55425a89994] Received event network-vif-deleted-bd711358-5fa1-44eb-afea-adfbb937a52a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.150670] env[63241]: DEBUG nova.compute.manager [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Received event network-vif-plugged-c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.150670] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] Acquiring lock "343a7e90-5e55-4125-8475-44050f267987-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.150670] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] Lock "343a7e90-5e55-4125-8475-44050f267987-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.150975] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] Lock "343a7e90-5e55-4125-8475-44050f267987-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.150975] env[63241]: DEBUG nova.compute.manager [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] No waiting events found dispatching network-vif-plugged-c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1450.151174] env[63241]: WARNING nova.compute.manager [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Received unexpected event network-vif-plugged-c69d6232-5a3b-404e-b2ce-6724865adf54 for instance with vm_state building and task_state spawning. 
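The repeated "Task: {'id': ..., 'name': ...} progress is N%" and "completed successfully" entries come from polling the vCenter task object until it reaches a terminal state. A minimal sketch of that polling loop follows; get_task_info() is a hypothetical stand-in for the property read that the real wait_for_task performs, so the progression shown here is simulated.

    # Hedged sketch of the task-polling pattern behind the entries above.
    # Assumption: get_task_info() is a hypothetical stub; the real code reads
    # the task's info from vCenter on each iteration.
    import time

    class TaskInfo(object):
        def __init__(self, state, progress=0, error=None):
            self.state = state          # 'running' | 'success' | 'error'
            self.progress = progress
            self.error = error

    _FAKE_PROGRESS = iter([0, 14, 99, 100])

    def get_task_info(task_ref):        # hypothetical stub
        pct = next(_FAKE_PROGRESS, 100)
        return TaskInfo('success' if pct == 100 else 'running', pct)

    def wait_for_task(task_ref, poll_interval=0.5):
        """Poll until the task reaches a terminal state, then return its info."""
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError(info.error)
            print("Task %s progress is %d%%" % (task_ref, info.progress))
            time.sleep(poll_interval)

    wait_for_task('task-1820103')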
[ 1450.151303] env[63241]: DEBUG nova.compute.manager [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Received event network-changed-c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.151457] env[63241]: DEBUG nova.compute.manager [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Refreshing instance network info cache due to event network-changed-c69d6232-5a3b-404e-b2ce-6724865adf54. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1450.151617] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] Acquiring lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.164923] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0b7c72e0-79b9-4435-9676-7a0e9afaf936 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1450.387124] env[63241]: DEBUG nova.network.neutron [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1450.541766] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820102, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.545403] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820103, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.570045] env[63241]: DEBUG nova.network.neutron [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Updating instance_info_cache with network_info: [{"id": "c69d6232-5a3b-404e-b2ce-6724865adf54", "address": "fa:16:3e:2e:de:23", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc69d6232-5a", "ovs_interfaceid": "c69d6232-5a3b-404e-b2ce-6724865adf54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.665073] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 5203c12e-14a0-4736-8185-8ead9a29b03b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.045522] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820103, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.049533] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.893553} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.049857] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5/c390d1ca-a199-4df6-847a-b543630a7bf5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1451.050309] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1451.050466] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-243e9726-83d0-4625-b671-7f00c48405bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.057354] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1451.057354] env[63241]: value = "task-1820104" [ 1451.057354] env[63241]: _type = "Task" [ 1451.057354] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.066966] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820104, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.072685] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Releasing lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.073094] env[63241]: DEBUG nova.compute.manager [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Instance network_info: |[{"id": "c69d6232-5a3b-404e-b2ce-6724865adf54", "address": "fa:16:3e:2e:de:23", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc69d6232-5a", "ovs_interfaceid": "c69d6232-5a3b-404e-b2ce-6724865adf54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1451.073480] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] Acquired lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.073722] env[63241]: DEBUG nova.network.neutron [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Refreshing network info cache for port c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1451.075147] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:de:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c118a9ee-84f7-4f09-8a21-05600ed3cc06', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c69d6232-5a3b-404e-b2ce-6724865adf54', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1451.085365] env[63241]: DEBUG oslo.service.loopingcall [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 
tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1451.089067] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 343a7e90-5e55-4125-8475-44050f267987] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1451.089686] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2860c09-8429-43c2-86cd-e81523a05ea4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.115602] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1451.115602] env[63241]: value = "task-1820105" [ 1451.115602] env[63241]: _type = "Task" [ 1451.115602] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.126259] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820105, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.168480] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e3df56a7-eb82-4297-8aa3-f77c0380b6ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.450317] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Successfully updated port: 4375bdb7-133a-4c01-8ad3-b93007c0de99 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1451.495251] env[63241]: DEBUG nova.network.neutron [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Updated VIF entry in instance network info cache for port c69d6232-5a3b-404e-b2ce-6724865adf54. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1451.495771] env[63241]: DEBUG nova.network.neutron [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Updating instance_info_cache with network_info: [{"id": "c69d6232-5a3b-404e-b2ce-6724865adf54", "address": "fa:16:3e:2e:de:23", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc69d6232-5a", "ovs_interfaceid": "c69d6232-5a3b-404e-b2ce-6724865adf54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.540779] env[63241]: DEBUG nova.compute.manager [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received event network-changed-d73b4859-4b46-4a03-b251-fc28eaadbec7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1451.541008] env[63241]: DEBUG nova.compute.manager [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Refreshing instance network info cache due to event network-changed-d73b4859-4b46-4a03-b251-fc28eaadbec7. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1451.541224] env[63241]: DEBUG oslo_concurrency.lockutils [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] Acquiring lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.541364] env[63241]: DEBUG oslo_concurrency.lockutils [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] Acquired lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.541519] env[63241]: DEBUG nova.network.neutron [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Refreshing network info cache for port d73b4859-4b46-4a03-b251-fc28eaadbec7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1451.549723] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820103, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.567978] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820104, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069693} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.568277] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1451.569113] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c8da51-2ede-4f0d-845d-4a574c59241f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.589415] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5/c390d1ca-a199-4df6-847a-b543630a7bf5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1451.590122] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18f71799-9bb6-4c97-a3da-ffe06cb515e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.611123] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1451.611123] env[63241]: value = "task-1820106" [ 1451.611123] env[63241]: _type = "Task" [ 1451.611123] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.622450] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820106, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.628448] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820105, 'name': CreateVM_Task, 'duration_secs': 0.399863} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.628619] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 343a7e90-5e55-4125-8475-44050f267987] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1451.629319] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.629481] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.629909] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1451.630191] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c798a1fa-fbc1-4739-a89a-56e2af496a27 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.635391] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1451.635391] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5296daaf-180f-726a-f51f-9e5463ca4687" [ 1451.635391] env[63241]: _type = "Task" [ 1451.635391] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.644758] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5296daaf-180f-726a-f51f-9e5463ca4687, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.672053] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 9361ee6a-7c4d-4409-bc3c-7da7d4550d97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.953185] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.999103] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab68d9ae-6e22-4fa6-be9a-110578ad8fd6 req-77c8d2f0-d18f-44df-a997-85eb563b05b2 service nova] Releasing lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.047234] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820103, 'name': ReconfigVM_Task, 'duration_secs': 1.547293} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.047600] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 2b1805b3-2e03-410f-8222-64b8542d4a43/2b1805b3-2e03-410f-8222-64b8542d4a43.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1452.048550] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9242008e-7f37-48c1-beea-3d0c58bfb430 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.055385] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1452.055385] env[63241]: value = "task-1820107" [ 1452.055385] env[63241]: _type = "Task" [ 1452.055385] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.064249] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820107, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.096071] env[63241]: DEBUG nova.network.neutron [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1452.122423] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820106, 'name': ReconfigVM_Task, 'duration_secs': 0.277233} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.122778] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Reconfigured VM instance instance-0000001f to attach disk [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5/c390d1ca-a199-4df6-847a-b543630a7bf5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1452.123611] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f7f5ed9-6b17-4187-8dfb-bde670fd5e87 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.132220] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1452.132220] env[63241]: value = "task-1820108" [ 1452.132220] env[63241]: _type = "Task" [ 1452.132220] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.145547] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5296daaf-180f-726a-f51f-9e5463ca4687, 'name': SearchDatastore_Task, 'duration_secs': 0.010788} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.150853] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.151254] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1452.151587] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.151805] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.152088] env[63241]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1452.152469] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820108, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.152776] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b37d73a-ab96-451d-9763-cdcd5dab78d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.162223] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1452.162426] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1452.163222] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7002f300-b169-464d-83c5-5ec90dd938ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.169259] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1452.169259] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520228fd-f6df-d8a9-8489-7d02c561cef3" [ 1452.169259] env[63241]: _type = "Task" [ 1452.169259] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.174798] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 2d1425f2-ddf9-4e82-bcfe-e11c597d011a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1452.179199] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520228fd-f6df-d8a9-8489-7d02c561cef3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.196155] env[63241]: DEBUG nova.network.neutron [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.568822] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820107, 'name': Rename_Task, 'duration_secs': 0.185874} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.569142] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1452.569398] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bac63622-b82d-4983-a2ac-658b4332d968 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.576738] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1452.576738] env[63241]: value = "task-1820109" [ 1452.576738] env[63241]: _type = "Task" [ 1452.576738] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.586437] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820109, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.642901] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820108, 'name': Rename_Task, 'duration_secs': 0.152551} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.643170] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1452.643428] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d40e4c0f-b273-4bf1-84f9-6dca28adfded {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.650798] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1452.650798] env[63241]: value = "task-1820110" [ 1452.650798] env[63241]: _type = "Task" [ 1452.650798] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.662375] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820110, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.679334] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520228fd-f6df-d8a9-8489-7d02c561cef3, 'name': SearchDatastore_Task, 'duration_secs': 0.009135} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.680691] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e753da08-d4a5-4f17-85c8-154e843798c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1452.681949] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6299e973-6b70-4189-9213-a8a888dd33d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.687897] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1452.687897] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521e7550-3e28-616b-f497-8c4c885a970f" [ 1452.687897] env[63241]: _type = "Task" [ 1452.687897] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.696901] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521e7550-3e28-616b-f497-8c4c885a970f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.698514] env[63241]: DEBUG oslo_concurrency.lockutils [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] Releasing lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.698766] env[63241]: DEBUG nova.compute.manager [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received event network-vif-plugged-4375bdb7-133a-4c01-8ad3-b93007c0de99 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1452.698957] env[63241]: DEBUG oslo_concurrency.lockutils [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] Acquiring lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.699536] env[63241]: DEBUG oslo_concurrency.lockutils [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] Lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.699536] env[63241]: DEBUG oslo_concurrency.lockutils [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] Lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.699536] env[63241]: DEBUG nova.compute.manager [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] No waiting events found dispatching network-vif-plugged-4375bdb7-133a-4c01-8ad3-b93007c0de99 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1452.699788] env[63241]: WARNING nova.compute.manager [req-4df64c8a-aff8-4de9-a4d7-5a5606b1f517 req-5d144621-554c-4585-a098-d8a82a2b290f service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received unexpected event network-vif-plugged-4375bdb7-133a-4c01-8ad3-b93007c0de99 for instance with vm_state building and task_state spawning. 
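The Acquiring/Acquired/"released" lock entries above (for example the "78894fda-...-events" lock taken and dropped around pop_instance_event, or the "refresh_cache-..." locks) are emitted by oslo.concurrency's lockutils helpers rather than by Nova's own logging calls. A minimal sketch of that usage pattern follows; the lock names and the work done under each lock are illustrative placeholders, not Nova's actual code.

# Minimal sketch (assumptions noted above) of the oslo.concurrency locking
# pattern visible in the surrounding log lines.
from oslo_concurrency import lockutils

@lockutils.synchronized('instance-events')          # decorator form
def pop_instance_event(instance_uuid, event_name):
    # look up and remove any waiter registered for this (instance, event)
    # pair; returning None matches the "No waiting events found" case above
    return None

def refresh_network_cache(instance_uuid):
    # context-manager form: the "acquired" / "released" DEBUG lines in the
    # log correspond to entering and leaving this block
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache here (placeholder)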
[ 1452.699937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquired lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.700098] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1453.087042] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820109, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.161767] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820110, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.186102] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance eaed706d-b3db-46ed-8c70-08f80479afa4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1453.186102] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1453.186477] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4544MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1453.199075] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521e7550-3e28-616b-f497-8c4c885a970f, 'name': SearchDatastore_Task, 'duration_secs': 0.010095} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.199209] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.199514] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 343a7e90-5e55-4125-8475-44050f267987/343a7e90-5e55-4125-8475-44050f267987.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1453.199787] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95935e45-4e05-4dc4-aeef-bad35a28010c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.209435] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1453.209435] env[63241]: value = "task-1820111" [ 1453.209435] env[63241]: _type = "Task" [ 1453.209435] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.217957] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820111, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.244112] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1453.578124] env[63241]: DEBUG nova.compute.manager [req-2149afb4-a068-4269-82d6-277a3e735fd7 req-33f217ed-bdba-496d-9de8-bed2a8afd5e1 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received event network-changed-4375bdb7-133a-4c01-8ad3-b93007c0de99 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1453.578124] env[63241]: DEBUG nova.compute.manager [req-2149afb4-a068-4269-82d6-277a3e735fd7 req-33f217ed-bdba-496d-9de8-bed2a8afd5e1 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Refreshing instance network info cache due to event network-changed-4375bdb7-133a-4c01-8ad3-b93007c0de99. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1453.578759] env[63241]: DEBUG oslo_concurrency.lockutils [req-2149afb4-a068-4269-82d6-277a3e735fd7 req-33f217ed-bdba-496d-9de8-bed2a8afd5e1 service nova] Acquiring lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.593641] env[63241]: DEBUG oslo_vmware.api [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820109, 'name': PowerOnVM_Task, 'duration_secs': 0.918362} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.593940] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1453.594283] env[63241]: INFO nova.compute.manager [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Took 10.92 seconds to spawn the instance on the hypervisor. [ 1453.594521] env[63241]: DEBUG nova.compute.manager [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1453.595694] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3238b637-8e5d-4983-ba8f-112770c7efe4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.668535] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820110, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.729535] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820111, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.889358] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b155dcc9-6c38-4240-afd9-efd947969385 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.898669] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5533f4-be17-4a74-8b9e-6d35a4db770b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.931052] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6dea0d-9f35-4e12-add9-d4dbbfbc79b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.943131] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216284bd-b6fa-48c5-a5f7-48bb91b25b82 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.949036] env[63241]: DEBUG nova.network.neutron [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Updating instance_info_cache with network_info: [{"id": "76a97e3a-997e-45eb-9aee-2696857c9aaf", "address": "fa:16:3e:7e:97:fe", "network": {"id": "252d5d89-738e-45ee-ac6b-8ffd0b53e1d3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-569216011", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76a97e3a-99", "ovs_interfaceid": "76a97e3a-997e-45eb-9aee-2696857c9aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d73b4859-4b46-4a03-b251-fc28eaadbec7", "address": "fa:16:3e:34:be:94", "network": {"id": "973f42f3-6900-49cd-8468-d103467601f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1365516895", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapd73b4859-4b", "ovs_interfaceid": "d73b4859-4b46-4a03-b251-fc28eaadbec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4375bdb7-133a-4c01-8ad3-b93007c0de99", "address": "fa:16:3e:a3:7a:61", "network": {"id": "252d5d89-738e-45ee-ac6b-8ffd0b53e1d3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-569216011", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4375bdb7-13", "ovs_interfaceid": "4375bdb7-133a-4c01-8ad3-b93007c0de99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.962174] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1454.126223] env[63241]: INFO nova.compute.manager [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Took 37.65 seconds to build instance. [ 1454.163441] env[63241]: DEBUG oslo_vmware.api [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820110, 'name': PowerOnVM_Task, 'duration_secs': 1.421161} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.163711] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1454.164100] env[63241]: DEBUG nova.compute.manager [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1454.164691] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aace14d4-37f6-4418-be92-5a6a2b09e345 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.222454] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820111, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559685} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.222773] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 343a7e90-5e55-4125-8475-44050f267987/343a7e90-5e55-4125-8475-44050f267987.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1454.222935] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1454.223202] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40cdfdc3-0c33-421a-ba25-f51b666e559e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.233069] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1454.233069] env[63241]: value = "task-1820112" [ 1454.233069] env[63241]: _type = "Task" [ 1454.233069] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.242532] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820112, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.452203] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Releasing lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.452296] env[63241]: DEBUG nova.compute.manager [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Instance network_info: |[{"id": "76a97e3a-997e-45eb-9aee-2696857c9aaf", "address": "fa:16:3e:7e:97:fe", "network": {"id": "252d5d89-738e-45ee-ac6b-8ffd0b53e1d3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-569216011", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76a97e3a-99", "ovs_interfaceid": "76a97e3a-997e-45eb-9aee-2696857c9aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d73b4859-4b46-4a03-b251-fc28eaadbec7", "address": "fa:16:3e:34:be:94", "network": {"id": "973f42f3-6900-49cd-8468-d103467601f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1365516895", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd73b4859-4b", "ovs_interfaceid": "d73b4859-4b46-4a03-b251-fc28eaadbec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4375bdb7-133a-4c01-8ad3-b93007c0de99", "address": "fa:16:3e:a3:7a:61", "network": {"id": "252d5d89-738e-45ee-ac6b-8ffd0b53e1d3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-569216011", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4375bdb7-13", "ovs_interfaceid": "4375bdb7-133a-4c01-8ad3-b93007c0de99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1454.452539] env[63241]: DEBUG oslo_concurrency.lockutils [req-2149afb4-a068-4269-82d6-277a3e735fd7 req-33f217ed-bdba-496d-9de8-bed2a8afd5e1 service nova] Acquired lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.452722] env[63241]: DEBUG nova.network.neutron [req-2149afb4-a068-4269-82d6-277a3e735fd7 req-33f217ed-bdba-496d-9de8-bed2a8afd5e1 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Refreshing network info cache for port 4375bdb7-133a-4c01-8ad3-b93007c0de99 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1454.453992] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:97:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76a97e3a-997e-45eb-9aee-2696857c9aaf', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:be:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '838c9497-35dd-415e-96c7-8dc21b0cd4b3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd73b4859-4b46-4a03-b251-fc28eaadbec7', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:7a:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4375bdb7-133a-4c01-8ad3-b93007c0de99', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1454.465786] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Creating folder: Project (5b874b5f558e48e9a83b27e69d262106). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1454.466690] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1454.469981] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e937e75-7d11-489f-b67a-6c206e8f9490 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.482404] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Created folder: Project (5b874b5f558e48e9a83b27e69d262106) in parent group-v376927. [ 1454.482584] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Creating folder: Instances. Parent ref: group-v377029. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1454.482803] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19641129-3845-4dcf-9ccc-811e4fcd7a3b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.491901] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Created folder: Instances in parent group-v377029. [ 1454.493156] env[63241]: DEBUG oslo.service.loopingcall [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1454.493156] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1454.493156] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32472462-cb2a-4c99-a2a4-aa96b02a4756 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.522876] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1454.522876] env[63241]: value = "task-1820115" [ 1454.522876] env[63241]: _type = "Task" [ 1454.522876] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.532447] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820115, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.628278] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6e811cd2-669a-462d-8df3-ecf191e46651 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.577s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.682168] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.742580] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820112, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077424} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.742859] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1454.743775] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce8a3d9-07d8-476b-b9bb-7608e4a1924e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.771905] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 343a7e90-5e55-4125-8475-44050f267987/343a7e90-5e55-4125-8475-44050f267987.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.772328] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e17f246d-f23f-4067-9d79-0c605f735048 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.797044] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1454.797044] env[63241]: value = "task-1820116" [ 1454.797044] env[63241]: _type = "Task" [ 1454.797044] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.805742] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820116, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.977020] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1454.977020] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.906s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.977020] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.546s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.977020] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.977020] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.035s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.978894] env[63241]: INFO nova.compute.claims [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1455.022496] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "c390d1ca-a199-4df6-847a-b543630a7bf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.022496] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "c390d1ca-a199-4df6-847a-b543630a7bf5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.022496] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "c390d1ca-a199-4df6-847a-b543630a7bf5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.022496] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "c390d1ca-a199-4df6-847a-b543630a7bf5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.022496] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "c390d1ca-a199-4df6-847a-b543630a7bf5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.023582] env[63241]: INFO nova.compute.manager [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Terminating instance [ 1455.026287] env[63241]: INFO nova.scheduler.client.report [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Deleted allocations for instance 41182989-2537-42f0-8c37-792b8b2c5206 [ 1455.033253] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "refresh_cache-c390d1ca-a199-4df6-847a-b543630a7bf5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.033253] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquired lock "refresh_cache-c390d1ca-a199-4df6-847a-b543630a7bf5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.033253] env[63241]: DEBUG nova.network.neutron [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1455.052543] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820115, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.131705] env[63241]: DEBUG nova.compute.manager [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1455.315434] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820116, 'name': ReconfigVM_Task, 'duration_secs': 0.379405} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.315774] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 343a7e90-5e55-4125-8475-44050f267987/343a7e90-5e55-4125-8475-44050f267987.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1455.317029] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28949954-3958-4bc3-b4d2-1d3d07b69fb5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.323858] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1455.323858] env[63241]: value = "task-1820117" [ 1455.323858] env[63241]: _type = "Task" [ 1455.323858] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.333173] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820117, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.541407] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820115, 'name': CreateVM_Task, 'duration_secs': 0.677946} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.541576] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1455.542872] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.543090] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.543503] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1455.545817] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7329aeb-8b90-4174-84b6-76bb994a0954 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.548451] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fab2088c-5d9b-402a-8480-1af1fd67cd7e tempest-AttachInterfacesV270Test-1418699301 tempest-AttachInterfacesV270Test-1418699301-project-member] Lock "41182989-2537-42f0-8c37-792b8b2c5206" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.498s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.559189] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1455.559189] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b6e087-38f3-4503-9b6e-235a05c3c342" [ 1455.559189] env[63241]: _type = "Task" [ 1455.559189] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.561805] env[63241]: DEBUG nova.network.neutron [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1455.571272] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b6e087-38f3-4503-9b6e-235a05c3c342, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.571605] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.571874] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1455.572136] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.572285] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.573521] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1455.573850] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4672b66d-ebfb-4df2-8db0-6e905d4792d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.583428] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1455.583680] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1455.584773] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc161005-b253-4658-bf5b-e3febb282e19 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.592415] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1455.592415] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ac85f7-783c-b59e-3bce-35c53500f28a" [ 1455.592415] env[63241]: _type = "Task" [ 1455.592415] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.602767] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ac85f7-783c-b59e-3bce-35c53500f28a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.628030] env[63241]: DEBUG nova.network.neutron [req-2149afb4-a068-4269-82d6-277a3e735fd7 req-33f217ed-bdba-496d-9de8-bed2a8afd5e1 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Updated VIF entry in instance network info cache for port 4375bdb7-133a-4c01-8ad3-b93007c0de99. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1455.628488] env[63241]: DEBUG nova.network.neutron [req-2149afb4-a068-4269-82d6-277a3e735fd7 req-33f217ed-bdba-496d-9de8-bed2a8afd5e1 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Updating instance_info_cache with network_info: [{"id": "76a97e3a-997e-45eb-9aee-2696857c9aaf", "address": "fa:16:3e:7e:97:fe", "network": {"id": "252d5d89-738e-45ee-ac6b-8ffd0b53e1d3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-569216011", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76a97e3a-99", "ovs_interfaceid": "76a97e3a-997e-45eb-9aee-2696857c9aaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d73b4859-4b46-4a03-b251-fc28eaadbec7", "address": "fa:16:3e:34:be:94", "network": {"id": "973f42f3-6900-49cd-8468-d103467601f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1365516895", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.29", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "838c9497-35dd-415e-96c7-8dc21b0cd4b3", "external-id": "nsx-vlan-transportzone-530", "segmentation_id": 530, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd73b4859-4b", "ovs_interfaceid": "d73b4859-4b46-4a03-b251-fc28eaadbec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4375bdb7-133a-4c01-8ad3-b93007c0de99", "address": "fa:16:3e:a3:7a:61", "network": {"id": "252d5d89-738e-45ee-ac6b-8ffd0b53e1d3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-569216011", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4375bdb7-13", "ovs_interfaceid": "4375bdb7-133a-4c01-8ad3-b93007c0de99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.631090] env[63241]: DEBUG nova.network.neutron [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.662525] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.682785] env[63241]: DEBUG nova.compute.manager [req-804db973-0bf9-4bf0-a0b4-97b8222c7de1 req-9f2c36d6-654d-4d62-9be7-26c19acccca0 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Received event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1455.682987] env[63241]: DEBUG nova.compute.manager [req-804db973-0bf9-4bf0-a0b4-97b8222c7de1 req-9f2c36d6-654d-4d62-9be7-26c19acccca0 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing instance network info cache due to event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1455.683378] env[63241]: DEBUG oslo_concurrency.lockutils [req-804db973-0bf9-4bf0-a0b4-97b8222c7de1 req-9f2c36d6-654d-4d62-9be7-26c19acccca0 service nova] Acquiring lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.683560] env[63241]: DEBUG oslo_concurrency.lockutils [req-804db973-0bf9-4bf0-a0b4-97b8222c7de1 req-9f2c36d6-654d-4d62-9be7-26c19acccca0 service nova] Acquired lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.683760] env[63241]: DEBUG nova.network.neutron [req-804db973-0bf9-4bf0-a0b4-97b8222c7de1 req-9f2c36d6-654d-4d62-9be7-26c19acccca0 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1455.838205] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820117, 'name': Rename_Task, 'duration_secs': 0.15423} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.838531] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1455.838719] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51d67503-b370-4f1e-956e-f4d0c5ff8d23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.846319] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1455.846319] env[63241]: value = "task-1820118" [ 1455.846319] env[63241]: _type = "Task" [ 1455.846319] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.854643] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820118, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.104571] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ac85f7-783c-b59e-3bce-35c53500f28a, 'name': SearchDatastore_Task, 'duration_secs': 0.01341} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.107646] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0433f60d-53d6-4c43-b525-6a516337b6ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.114982] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1456.114982] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b763c2-378d-127c-1812-40ebb6aedbaf" [ 1456.114982] env[63241]: _type = "Task" [ 1456.114982] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.127462] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b763c2-378d-127c-1812-40ebb6aedbaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.133330] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Releasing lock "refresh_cache-c390d1ca-a199-4df6-847a-b543630a7bf5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.133743] env[63241]: DEBUG nova.compute.manager [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1456.133938] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1456.134447] env[63241]: DEBUG oslo_concurrency.lockutils [req-2149afb4-a068-4269-82d6-277a3e735fd7 req-33f217ed-bdba-496d-9de8-bed2a8afd5e1 service nova] Releasing lock "refresh_cache-78894fda-8309-430a-ab38-ce1a415d83d3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.135430] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507e3e3e-a9b7-4333-b34a-cde34a16a324 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.142942] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1456.145923] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d0017d4-8069-464e-96cc-f7ec0b5f520c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.152652] env[63241]: DEBUG oslo_vmware.api [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1456.152652] env[63241]: value = "task-1820119" [ 1456.152652] env[63241]: _type = "Task" [ 1456.152652] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.166918] env[63241]: DEBUG oslo_vmware.api [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820119, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.364621] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820118, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.412902] env[63241]: DEBUG oslo_vmware.rw_handles [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255f928-1323-499f-fa76-4760bea02b11/disk-0.vmdk. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1456.413954] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8943fd-6890-4ded-b0dd-75221a3ea4b8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.423784] env[63241]: DEBUG oslo_vmware.rw_handles [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255f928-1323-499f-fa76-4760bea02b11/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1456.423964] env[63241]: ERROR oslo_vmware.rw_handles [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255f928-1323-499f-fa76-4760bea02b11/disk-0.vmdk due to incomplete transfer. [ 1456.424212] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2cde9e9d-8e5d-4540-b936-c1c21af3a6aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.433975] env[63241]: DEBUG oslo_vmware.rw_handles [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255f928-1323-499f-fa76-4760bea02b11/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1456.434229] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Uploaded image dcb38905-84bc-4f0d-9bbd-6e6343d3cd85 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1456.436620] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1456.439295] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-40fa1bf2-6b70-42fc-be28-2fc962ebb684 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.445742] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1456.445742] env[63241]: value = "task-1820120" [ 1456.445742] env[63241]: _type = "Task" [ 1456.445742] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.454268] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820120, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.550473] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63de34ab-f605-4096-b1a9-95cf332c9f7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.558993] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999032bb-ca3c-4ace-b922-c7564231f7bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.597325] env[63241]: DEBUG nova.network.neutron [req-804db973-0bf9-4bf0-a0b4-97b8222c7de1 req-9f2c36d6-654d-4d62-9be7-26c19acccca0 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updated VIF entry in instance network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1456.598226] env[63241]: DEBUG nova.network.neutron [req-804db973-0bf9-4bf0-a0b4-97b8222c7de1 req-9f2c36d6-654d-4d62-9be7-26c19acccca0 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updating instance_info_cache with network_info: [{"id": "4e927ec6-c091-40ea-8276-55eb762b414d", "address": "fa:16:3e:13:6a:ad", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e927ec6-c0", "ovs_interfaceid": "4e927ec6-c091-40ea-8276-55eb762b414d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.603506] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15b9590-e279-4a6b-936f-3dc98667c605 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.615443] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717861c6-31b6-47a0-8ed1-4ca9d9414dee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1456.632205] env[63241]: DEBUG nova.compute.provider_tree [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1456.640171] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b763c2-378d-127c-1812-40ebb6aedbaf, 'name': SearchDatastore_Task, 'duration_secs': 0.011599} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.640171] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1456.640171] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 78894fda-8309-430a-ab38-ce1a415d83d3/78894fda-8309-430a-ab38-ce1a415d83d3.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1456.640171] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43d66304-84a1-411b-a083-bc51aba5a530 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.650295] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1456.650295] env[63241]: value = "task-1820121" [ 1456.650295] env[63241]: _type = "Task" [ 1456.650295] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.663622] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.667185] env[63241]: DEBUG oslo_vmware.api [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820119, 'name': PowerOffVM_Task, 'duration_secs': 0.162312} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.667718] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1456.668023] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1456.668363] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05ad646e-5d6e-4763-89ba-f33da26261e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.695151] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1456.695151] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1456.695151] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Deleting the datastore file [datastore1] c390d1ca-a199-4df6-847a-b543630a7bf5 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1456.695151] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26389bfa-d06c-40ca-b983-c188c4242713 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.701120] env[63241]: DEBUG oslo_vmware.api [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for the task: (returnval){ [ 1456.701120] env[63241]: value = "task-1820123" [ 1456.701120] env[63241]: _type = "Task" [ 1456.701120] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.709358] env[63241]: DEBUG oslo_vmware.api [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820123, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.864130] env[63241]: DEBUG oslo_vmware.api [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820118, 'name': PowerOnVM_Task, 'duration_secs': 0.563607} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.864130] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1456.864130] env[63241]: INFO nova.compute.manager [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1456.864130] env[63241]: DEBUG nova.compute.manager [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1456.864130] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6d3f11-cfaf-46c6-a344-e7e3f786dbd9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.956770] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820120, 'name': Destroy_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.110689] env[63241]: DEBUG oslo_concurrency.lockutils [req-804db973-0bf9-4bf0-a0b4-97b8222c7de1 req-9f2c36d6-654d-4d62-9be7-26c19acccca0 service nova] Releasing lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.142617] env[63241]: DEBUG nova.scheduler.client.report [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1457.161937] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820121, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.216469] env[63241]: DEBUG oslo_vmware.api [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Task: {'id': task-1820123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1159} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.216795] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1457.216943] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1457.217163] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1457.217289] env[63241]: INFO nova.compute.manager [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1457.217571] env[63241]: DEBUG oslo.service.loopingcall [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.217791] env[63241]: DEBUG nova.compute.manager [-] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1457.217889] env[63241]: DEBUG nova.network.neutron [-] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1457.241063] env[63241]: DEBUG nova.network.neutron [-] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1457.387479] env[63241]: INFO nova.compute.manager [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Took 34.94 seconds to build instance. [ 1457.460091] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820120, 'name': Destroy_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.649019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.649019] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1457.651903] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.736s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.655072] env[63241]: INFO nova.compute.claims [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1457.666373] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820121, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.008622} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.666639] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 78894fda-8309-430a-ab38-ce1a415d83d3/78894fda-8309-430a-ab38-ce1a415d83d3.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1457.666897] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1457.667186] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b2bfe2e-3204-467b-bfd5-8011e211a8b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.674268] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1457.674268] env[63241]: value = "task-1820124" [ 1457.674268] env[63241]: _type = "Task" [ 1457.674268] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.685925] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820124, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.744676] env[63241]: DEBUG nova.network.neutron [-] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.893991] env[63241]: DEBUG oslo_concurrency.lockutils [None req-74e522cd-c630-4d94-9163-a8611fb9183d tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "343a7e90-5e55-4125-8475-44050f267987" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.845s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.958109] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820120, 'name': Destroy_Task, 'duration_secs': 1.226119} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.958400] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Destroyed the VM [ 1457.958793] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1457.958992] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6656e62f-2291-463b-8479-80527bd20019 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.965932] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1457.965932] env[63241]: value = "task-1820125" [ 1457.965932] env[63241]: _type = "Task" [ 1457.965932] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.974312] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820125, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.983410] env[63241]: DEBUG nova.compute.manager [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Received event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1457.984084] env[63241]: DEBUG nova.compute.manager [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing instance network info cache due to event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1457.984084] env[63241]: DEBUG oslo_concurrency.lockutils [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] Acquiring lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.984432] env[63241]: DEBUG oslo_concurrency.lockutils [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] Acquired lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.984653] env[63241]: DEBUG nova.network.neutron [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1458.157873] env[63241]: DEBUG nova.compute.utils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1458.159312] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1458.159512] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1458.186608] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820124, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076623} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.186893] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1458.187762] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9ab25c-dc69-462d-9634-d863b217195f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.220880] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 78894fda-8309-430a-ab38-ce1a415d83d3/78894fda-8309-430a-ab38-ce1a415d83d3.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1458.222952] env[63241]: DEBUG nova.policy [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a82d851c78948b584befb1877fdae84', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b5feea1ae034391a60091ce5fa83f16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1458.224494] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efc5e929-c57f-48b5-b1ba-25c7b3b83a31 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.250080] env[63241]: INFO nova.compute.manager [-] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Took 1.03 seconds to deallocate network for instance. [ 1458.250080] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1458.250080] env[63241]: value = "task-1820126" [ 1458.250080] env[63241]: _type = "Task" [ 1458.250080] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.261707] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820126, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.398058] env[63241]: DEBUG nova.compute.manager [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1458.476361] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820125, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.521347] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Successfully created port: 70f92de2-a999-4655-b551-a67d4f67f289 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1458.663760] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1458.760181] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.766616] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820126, 'name': ReconfigVM_Task, 'duration_secs': 0.408251} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.767663] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 78894fda-8309-430a-ab38-ce1a415d83d3/78894fda-8309-430a-ab38-ce1a415d83d3.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1458.767663] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7555bbad-f300-4d2c-90f5-d878079d66ad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.774580] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1458.774580] env[63241]: value = "task-1820127" [ 1458.774580] env[63241]: _type = "Task" [ 1458.774580] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.785034] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820127, 'name': Rename_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.924782] env[63241]: DEBUG oslo_concurrency.lockutils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.978073] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820125, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.235298] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab539e5-fb47-4fb8-82df-c02e96aff891 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.244851] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc728d2-83bd-4142-9393-40059f0a5208 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.284033] env[63241]: DEBUG nova.network.neutron [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updated VIF entry in instance network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1459.284388] env[63241]: DEBUG nova.network.neutron [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updating instance_info_cache with network_info: [{"id": "4e927ec6-c091-40ea-8276-55eb762b414d", "address": "fa:16:3e:13:6a:ad", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e927ec6-c0", "ovs_interfaceid": "4e927ec6-c091-40ea-8276-55eb762b414d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.293963] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1525e044-4b17-4df6-a15a-fbcc15845464 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.305057] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820127, 'name': Rename_Task, 'duration_secs': 0.2028} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.305245] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1459.305599] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bafb2c6-1b6f-456d-8c89-eb6865ae6d6d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.308233] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7795bf5-56b0-4f7d-91a6-a087dd9f8f63 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.323365] env[63241]: DEBUG nova.compute.provider_tree [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1459.326699] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1459.326699] env[63241]: value = "task-1820128" [ 1459.326699] env[63241]: _type = "Task" [ 1459.326699] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.335823] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820128, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.479892] env[63241]: DEBUG oslo_vmware.api [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820125, 'name': RemoveSnapshot_Task, 'duration_secs': 1.180134} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.482397] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1459.482397] env[63241]: INFO nova.compute.manager [None req-9174868e-50b4-478e-afd5-54c0bd196ddc tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Took 14.72 seconds to snapshot the instance on the hypervisor. [ 1459.679739] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1459.711371] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1459.711371] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1459.711371] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1459.711371] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1459.711371] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1459.711371] 
env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1459.712233] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1459.712530] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1459.712954] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1459.713240] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1459.713528] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1459.715136] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bf8f71-ce55-427c-8e12-f5dd354f0cb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.724139] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6513a6-9f9b-4e30-8885-9fcab1a9c7d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.801367] env[63241]: DEBUG oslo_concurrency.lockutils [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] Releasing lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.801367] env[63241]: DEBUG nova.compute.manager [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Received event network-changed-cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.801367] env[63241]: DEBUG nova.compute.manager [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service 
nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Refreshing instance network info cache due to event network-changed-cae6ea4d-1012-49d8-b413-e047b6b16de9. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1459.801367] env[63241]: DEBUG oslo_concurrency.lockutils [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] Acquiring lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.801367] env[63241]: DEBUG oslo_concurrency.lockutils [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] Acquired lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.801367] env[63241]: DEBUG nova.network.neutron [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Refreshing network info cache for port cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1459.827709] env[63241]: DEBUG nova.scheduler.client.report [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1459.840828] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820128, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.291251] env[63241]: DEBUG nova.compute.manager [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Received event network-changed-c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1460.291448] env[63241]: DEBUG nova.compute.manager [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Refreshing instance network info cache due to event network-changed-c69d6232-5a3b-404e-b2ce-6724865adf54. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1460.291958] env[63241]: DEBUG oslo_concurrency.lockutils [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] Acquiring lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.292192] env[63241]: DEBUG oslo_concurrency.lockutils [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] Acquired lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.292387] env[63241]: DEBUG nova.network.neutron [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Refreshing network info cache for port c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1460.337941] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.338709] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1460.346018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.623s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.346018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.346018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.089s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.347242] env[63241]: INFO nova.compute.claims [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1460.358219] env[63241]: DEBUG oslo_vmware.api [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820128, 'name': PowerOnVM_Task, 'duration_secs': 0.692983} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.358720] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1460.359152] env[63241]: INFO nova.compute.manager [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Took 14.94 seconds to spawn the instance on the hypervisor. 
[ 1460.359406] env[63241]: DEBUG nova.compute.manager [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1460.360238] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17746f4-aea3-4649-85e8-ba863e2cc570 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.378472] env[63241]: DEBUG nova.compute.manager [req-02ae1e45-f5ed-47c4-a670-2f7d7f6e56c8 req-4b7f798a-ca6a-42ae-a8bc-c16c8fe4613c service nova] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Received event network-vif-plugged-70f92de2-a999-4655-b551-a67d4f67f289 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1460.378472] env[63241]: DEBUG oslo_concurrency.lockutils [req-02ae1e45-f5ed-47c4-a670-2f7d7f6e56c8 req-4b7f798a-ca6a-42ae-a8bc-c16c8fe4613c service nova] Acquiring lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.378472] env[63241]: DEBUG oslo_concurrency.lockutils [req-02ae1e45-f5ed-47c4-a670-2f7d7f6e56c8 req-4b7f798a-ca6a-42ae-a8bc-c16c8fe4613c service nova] Lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.378472] env[63241]: DEBUG oslo_concurrency.lockutils [req-02ae1e45-f5ed-47c4-a670-2f7d7f6e56c8 req-4b7f798a-ca6a-42ae-a8bc-c16c8fe4613c service nova] Lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.378472] env[63241]: DEBUG nova.compute.manager [req-02ae1e45-f5ed-47c4-a670-2f7d7f6e56c8 req-4b7f798a-ca6a-42ae-a8bc-c16c8fe4613c service nova] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] No waiting events found dispatching network-vif-plugged-70f92de2-a999-4655-b551-a67d4f67f289 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1460.378472] env[63241]: WARNING nova.compute.manager [req-02ae1e45-f5ed-47c4-a670-2f7d7f6e56c8 req-4b7f798a-ca6a-42ae-a8bc-c16c8fe4613c service nova] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Received unexpected event network-vif-plugged-70f92de2-a999-4655-b551-a67d4f67f289 for instance with vm_state building and task_state spawning. 
[ 1460.394245] env[63241]: INFO nova.scheduler.client.report [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Deleted allocations for instance d60c3a22-19fb-4826-be88-d0307810a079 [ 1460.489172] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Successfully updated port: 70f92de2-a999-4655-b551-a67d4f67f289 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1460.663304] env[63241]: DEBUG nova.network.neutron [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updated VIF entry in instance network info cache for port cae6ea4d-1012-49d8-b413-e047b6b16de9. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1460.663304] env[63241]: DEBUG nova.network.neutron [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updating instance_info_cache with network_info: [{"id": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "address": "fa:16:3e:74:66:bb", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcae6ea4d-10", "ovs_interfaceid": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.784635] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "2b1805b3-2e03-410f-8222-64b8542d4a43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.784635] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1460.784635] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "2b1805b3-2e03-410f-8222-64b8542d4a43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.784635] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.784635] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.785680] env[63241]: INFO nova.compute.manager [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Terminating instance [ 1460.787780] env[63241]: DEBUG nova.compute.manager [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1460.788129] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1460.789255] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26b0dc0-0f13-47b2-92c1-2cd6069ffa28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.804437] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1460.804686] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d44598a0-2914-4311-a07c-248c094dd375 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.811208] env[63241]: DEBUG oslo_vmware.api [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1460.811208] env[63241]: value = "task-1820129" [ 1460.811208] env[63241]: _type = "Task" [ 1460.811208] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.820113] env[63241]: DEBUG oslo_vmware.api [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820129, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.844922] env[63241]: DEBUG nova.compute.utils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1460.846816] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1460.846986] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1460.859163] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "343a7e90-5e55-4125-8475-44050f267987" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.859405] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "343a7e90-5e55-4125-8475-44050f267987" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.859671] env[63241]: INFO nova.compute.manager [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Rebooting instance [ 1460.883251] env[63241]: INFO nova.compute.manager [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Took 42.28 seconds to build instance. 
[ 1460.899139] env[63241]: DEBUG nova.policy [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a82d851c78948b584befb1877fdae84', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b5feea1ae034391a60091ce5fa83f16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1460.903669] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5d3901b-2855-4f20-ab80-03ff2101f5d2 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "d60c3a22-19fb-4826-be88-d0307810a079" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.220s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.996021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "refresh_cache-c1c85cc0-53f1-4920-8f3e-6dd69414fa85" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.996021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired lock "refresh_cache-c1c85cc0-53f1-4920-8f3e-6dd69414fa85" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.996021] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1461.167801] env[63241]: DEBUG oslo_concurrency.lockutils [req-cc618545-7f82-4370-9e2c-0c4820e08254 req-53e6b732-e10d-4191-8e31-6e948ca4fbf4 service nova] Releasing lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.177870] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Successfully created port: 6d8f94d0-98d7-4846-98e1-76924fe639d0 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1461.228284] env[63241]: DEBUG nova.network.neutron [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Updated VIF entry in instance network info cache for port c69d6232-5a3b-404e-b2ce-6724865adf54. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1461.228710] env[63241]: DEBUG nova.network.neutron [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Updating instance_info_cache with network_info: [{"id": "c69d6232-5a3b-404e-b2ce-6724865adf54", "address": "fa:16:3e:2e:de:23", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc69d6232-5a", "ovs_interfaceid": "c69d6232-5a3b-404e-b2ce-6724865adf54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.325372] env[63241]: DEBUG oslo_vmware.api [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820129, 'name': PowerOffVM_Task, 'duration_secs': 0.16743} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.325696] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1461.325987] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1461.326270] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-945410e1-3790-4574-b10f-82bcd586a7e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.352834] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1461.386871] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5d88d8b0-d3b0-4eb0-9fa1-b8f7ef32b8de tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "78894fda-8309-430a-ab38-ce1a415d83d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.892s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.401254] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.412116] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1461.412116] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1461.412116] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Deleting the datastore file [datastore1] 2b1805b3-2e03-410f-8222-64b8542d4a43 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1461.412586] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07dc322c-933f-4fb0-be51-0a618ecf0be2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.423800] env[63241]: DEBUG oslo_vmware.api [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1461.423800] env[63241]: value = "task-1820131" [ 1461.423800] env[63241]: _type = "Task" [ 1461.423800] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.432444] env[63241]: DEBUG oslo_vmware.api [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.557757] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1461.731428] env[63241]: DEBUG oslo_concurrency.lockutils [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] Releasing lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.731702] env[63241]: DEBUG nova.compute.manager [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Received event network-changed-cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1461.731874] env[63241]: DEBUG nova.compute.manager [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Refreshing instance network info cache due to event network-changed-cae6ea4d-1012-49d8-b413-e047b6b16de9. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1461.732146] env[63241]: DEBUG oslo_concurrency.lockutils [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] Acquiring lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.732300] env[63241]: DEBUG oslo_concurrency.lockutils [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] Acquired lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.732463] env[63241]: DEBUG nova.network.neutron [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Refreshing network info cache for port cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1461.733881] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquired lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.734073] env[63241]: DEBUG nova.network.neutron [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1461.739750] env[63241]: DEBUG nova.compute.manager [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1461.740666] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eda4da2-d40b-4280-8d08-b62f4af8e359 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1461.811249] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Updating instance_info_cache with network_info: [{"id": "70f92de2-a999-4655-b551-a67d4f67f289", "address": "fa:16:3e:a5:eb:df", "network": {"id": "7755d62b-4569-44fe-b57c-56fd80b68fdb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-822364304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5feea1ae034391a60091ce5fa83f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70f92de2-a9", "ovs_interfaceid": "70f92de2-a999-4655-b551-a67d4f67f289", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.890570] env[63241]: DEBUG nova.compute.manager [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1461.934771] env[63241]: DEBUG oslo_vmware.api [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820131, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.951471] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d0bcb4-013a-4b7d-a122-ca36ede56e5d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.963532] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adba86a-85c5-4e50-9ec4-e3820cbeee7d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.003512] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2ec7cc-8a2a-4c22-b7b5-1b274c9157b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.012677] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84673272-d6ab-450d-8eca-9bcf59de845c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.030667] env[63241]: DEBUG nova.compute.provider_tree [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.257067] env[63241]: INFO nova.compute.manager [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] instance snapshotting [ 1462.262299] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e0f2c9-4d4c-4a54-807e-f2d89c22d438 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.302243] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2ad1d4-9576-4f58-9a09-827e70dc2c37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.317019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Releasing lock "refresh_cache-c1c85cc0-53f1-4920-8f3e-6dd69414fa85" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.317019] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Instance network_info: |[{"id": "70f92de2-a999-4655-b551-a67d4f67f289", "address": "fa:16:3e:a5:eb:df", "network": {"id": "7755d62b-4569-44fe-b57c-56fd80b68fdb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-822364304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5feea1ae034391a60091ce5fa83f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70f92de2-a9", "ovs_interfaceid": "70f92de2-a999-4655-b551-a67d4f67f289", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1462.317019] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:eb:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '72781990-3cb3-42eb-9eb1-4040dedbf66f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70f92de2-a999-4655-b551-a67d4f67f289', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1462.323450] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Creating folder: Project (1b5feea1ae034391a60091ce5fa83f16). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1462.324945] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4aa78c87-d809-49ab-ba7c-ccc7f94e1837 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.336786] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Created folder: Project (1b5feea1ae034391a60091ce5fa83f16) in parent group-v376927. [ 1462.336978] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Creating folder: Instances. Parent ref: group-v377032. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1462.337254] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7831615b-968e-41ff-ac36-65aa4fc02383 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.349939] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Created folder: Instances in parent group-v377032. [ 1462.350205] env[63241]: DEBUG oslo.service.loopingcall [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1462.350400] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1462.350608] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f854b0e-9549-4ec0-9d3f-8ff86df7e65c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.368580] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1462.375884] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1462.375884] env[63241]: value = "task-1820134" [ 1462.375884] env[63241]: _type = "Task" [ 1462.375884] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.384385] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820134, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.403126] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1462.404019] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1462.404019] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1462.404019] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1462.404233] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1462.404294] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1462.404552] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1462.404770] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1462.404984] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1462.405228] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1462.405435] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1462.409021] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a396d173-0765-4738-bf93-825d986b08f4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.419952] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bdc179-6432-4429-99ed-cd3d746c8649 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.434680] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.443157] env[63241]: DEBUG oslo_vmware.api [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 
tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.574467} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.443503] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1462.443690] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1462.444042] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1462.444042] env[63241]: INFO nova.compute.manager [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1462.444285] env[63241]: DEBUG oslo.service.loopingcall [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1462.444472] env[63241]: DEBUG nova.compute.manager [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1462.444562] env[63241]: DEBUG nova.network.neutron [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1462.534039] env[63241]: DEBUG nova.scheduler.client.report [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1462.752146] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.752463] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.752675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.752889] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.753105] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.755677] env[63241]: INFO nova.compute.manager [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Terminating instance [ 1462.757903] env[63241]: DEBUG nova.compute.manager [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1462.758264] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1462.759450] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26f1b1d-9147-4ab8-9a6d-de13c4b56001 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.776570] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1462.776876] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec0b664d-b152-41b6-8458-0aa5fdc6ca17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.783998] env[63241]: DEBUG oslo_vmware.api [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1462.783998] env[63241]: value = "task-1820135" [ 1462.783998] env[63241]: _type = "Task" [ 1462.783998] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.799612] env[63241]: DEBUG oslo_vmware.api [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820135, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.817072] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1462.817200] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bbb7390f-19a8-449e-9f48-ddb78ed91157 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.825406] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1462.825406] env[63241]: value = "task-1820136" [ 1462.825406] env[63241]: _type = "Task" [ 1462.825406] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.834275] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820136, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.892461] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820134, 'name': CreateVM_Task, 'duration_secs': 0.502253} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.895039] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1462.895771] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.895950] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.896341] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1462.897097] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e90d81b-44ab-42e6-9217-7624e36608df {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.903346] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1462.903346] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e1a9cc-95c4-6214-788a-f3bb93ac0f64" [ 1462.903346] env[63241]: _type = "Task" [ 1462.903346] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.914590] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e1a9cc-95c4-6214-788a-f3bb93ac0f64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.915638] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Successfully updated port: 6d8f94d0-98d7-4846-98e1-76924fe639d0 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1462.937109] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquiring lock "3dfeaf57-2244-418e-a04a-ed4143e454d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.937417] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "3dfeaf57-2244-418e-a04a-ed4143e454d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.967365] env[63241]: DEBUG nova.network.neutron [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Updating instance_info_cache with network_info: [{"id": "c69d6232-5a3b-404e-b2ce-6724865adf54", "address": "fa:16:3e:2e:de:23", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc69d6232-5a", "ovs_interfaceid": "c69d6232-5a3b-404e-b2ce-6724865adf54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.988111] env[63241]: DEBUG nova.network.neutron [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updated VIF entry in instance network info cache for port cae6ea4d-1012-49d8-b413-e047b6b16de9. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1462.988596] env[63241]: DEBUG nova.network.neutron [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updating instance_info_cache with network_info: [{"id": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "address": "fa:16:3e:74:66:bb", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcae6ea4d-10", "ovs_interfaceid": "cae6ea4d-1012-49d8-b413-e047b6b16de9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.039599] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.040155] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1463.043095] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.758s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.043303] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.045265] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.071s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.046754] env[63241]: INFO nova.compute.claims [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1463.052164] env[63241]: DEBUG nova.compute.manager [req-8c2520ac-104f-442d-b8a9-5c63043b28d5 req-6c514826-94cf-46b7-861c-f377472b6a2f service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Received event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1463.052365] env[63241]: DEBUG nova.compute.manager [req-8c2520ac-104f-442d-b8a9-5c63043b28d5 req-6c514826-94cf-46b7-861c-f377472b6a2f service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing instance network info cache due to event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1463.052574] env[63241]: DEBUG oslo_concurrency.lockutils [req-8c2520ac-104f-442d-b8a9-5c63043b28d5 req-6c514826-94cf-46b7-861c-f377472b6a2f service nova] Acquiring lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.052691] env[63241]: DEBUG oslo_concurrency.lockutils [req-8c2520ac-104f-442d-b8a9-5c63043b28d5 req-6c514826-94cf-46b7-861c-f377472b6a2f service nova] Acquired lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.053555] env[63241]: DEBUG nova.network.neutron [req-8c2520ac-104f-442d-b8a9-5c63043b28d5 req-6c514826-94cf-46b7-861c-f377472b6a2f service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1463.085826] env[63241]: INFO nova.scheduler.client.report [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Deleted allocations for instance 40217405-dcba-48cf-9d92-4122390d9fa8 [ 1463.114029] env[63241]: DEBUG nova.compute.manager [req-dc00000a-582a-4f54-8c1a-9b37a6f6bbc9 req-28d421ca-7f1f-49c4-927c-30ebd9a728b4 service nova] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Received event network-changed-70f92de2-a999-4655-b551-a67d4f67f289 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1463.114238] env[63241]: DEBUG nova.compute.manager [req-dc00000a-582a-4f54-8c1a-9b37a6f6bbc9 req-28d421ca-7f1f-49c4-927c-30ebd9a728b4 service nova] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Refreshing instance network info cache due to event network-changed-70f92de2-a999-4655-b551-a67d4f67f289. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1463.114442] env[63241]: DEBUG oslo_concurrency.lockutils [req-dc00000a-582a-4f54-8c1a-9b37a6f6bbc9 req-28d421ca-7f1f-49c4-927c-30ebd9a728b4 service nova] Acquiring lock "refresh_cache-c1c85cc0-53f1-4920-8f3e-6dd69414fa85" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.114578] env[63241]: DEBUG oslo_concurrency.lockutils [req-dc00000a-582a-4f54-8c1a-9b37a6f6bbc9 req-28d421ca-7f1f-49c4-927c-30ebd9a728b4 service nova] Acquired lock "refresh_cache-c1c85cc0-53f1-4920-8f3e-6dd69414fa85" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.114728] env[63241]: DEBUG nova.network.neutron [req-dc00000a-582a-4f54-8c1a-9b37a6f6bbc9 req-28d421ca-7f1f-49c4-927c-30ebd9a728b4 service nova] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Refreshing network info cache for port 70f92de2-a999-4655-b551-a67d4f67f289 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1463.294903] env[63241]: DEBUG oslo_vmware.api [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820135, 'name': PowerOffVM_Task, 'duration_secs': 0.282752} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.295440] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1463.295643] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1463.296248] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4200e37-5376-4858-a9a3-a06190140472 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.335378] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820136, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.375469] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1463.376129] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1463.376129] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Deleting the datastore file [datastore1] fbbb7682-873d-4bb0-8d39-4aec3566b0af {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1463.376258] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e16703dc-67a7-40c8-b826-228e48666a8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.384517] env[63241]: DEBUG oslo_vmware.api [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for the task: (returnval){ [ 1463.384517] env[63241]: value = "task-1820138" [ 1463.384517] env[63241]: _type = "Task" [ 1463.384517] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.389304] env[63241]: DEBUG nova.network.neutron [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.396780] env[63241]: DEBUG oslo_vmware.api [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820138, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.413084] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e1a9cc-95c4-6214-788a-f3bb93ac0f64, 'name': SearchDatastore_Task, 'duration_secs': 0.009007} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.413513] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.413632] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1463.413863] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.414017] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.414206] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1463.414458] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82ec5c4a-a0de-48f1-86f8-a8d549630c17 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.418239] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "refresh_cache-81854e13-e0c1-43a9-8529-678d56d57bbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.418395] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired lock "refresh_cache-81854e13-e0c1-43a9-8529-678d56d57bbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.418542] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1463.424509] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1463.424760] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1463.425456] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-690dfe56-c025-4406-be80-92bdd5e02303 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.433942] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1463.433942] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c3e789-e241-fdca-7791-1f87f4b1630b" [ 1463.433942] env[63241]: _type = "Task" [ 1463.433942] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.446746] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c3e789-e241-fdca-7791-1f87f4b1630b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.455721] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "78894fda-8309-430a-ab38-ce1a415d83d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.455981] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "78894fda-8309-430a-ab38-ce1a415d83d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.456210] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.456394] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.456562] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "78894fda-8309-430a-ab38-ce1a415d83d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.460281] env[63241]: INFO nova.compute.manager [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Terminating instance [ 1463.462845] env[63241]: DEBUG nova.compute.manager [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1463.463061] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1463.463972] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb08201d-204e-4fb1-ad68-e38932fca4cc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.471973] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Releasing lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.473765] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1463.474033] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ad4cc99-dea4-4942-979d-f0dcd39660c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.476105] env[63241]: DEBUG nova.compute.manager [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1463.476957] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fed871-ac51-426e-8665-88bfd79628f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.488915] env[63241]: DEBUG oslo_vmware.api [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1463.488915] env[63241]: value = "task-1820139" [ 1463.488915] env[63241]: _type = "Task" [ 1463.488915] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.492650] env[63241]: DEBUG oslo_concurrency.lockutils [req-fe9a0795-e3e2-4581-9983-0272c8e2320c req-aff08b83-857d-4bb1-9304-f96d270c7e28 service nova] Releasing lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.498817] env[63241]: DEBUG oslo_vmware.api [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820139, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.555161] env[63241]: DEBUG nova.compute.utils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1463.555161] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1463.555161] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1463.596209] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc278c45-4652-460a-b895-342b96841cc6 tempest-ServerDiagnosticsNegativeTest-3115894 tempest-ServerDiagnosticsNegativeTest-3115894-project-member] Lock "40217405-dcba-48cf-9d92-4122390d9fa8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.365s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.696657] env[63241]: DEBUG nova.policy [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a82d851c78948b584befb1877fdae84', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b5feea1ae034391a60091ce5fa83f16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1463.838492] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820136, 'name': CreateSnapshot_Task, 'duration_secs': 0.666411} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.838994] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1463.839672] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a259afe-b000-4668-b3cc-5e2dfad62e8e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.892422] env[63241]: INFO nova.compute.manager [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Took 1.45 seconds to deallocate network for instance. [ 1463.901958] env[63241]: DEBUG oslo_vmware.api [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Task: {'id': task-1820138, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261778} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.901958] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1463.902102] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1463.902197] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1463.902373] env[63241]: INFO nova.compute.manager [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1463.902607] env[63241]: DEBUG oslo.service.loopingcall [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1463.902793] env[63241]: DEBUG nova.compute.manager [-] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1463.902913] env[63241]: DEBUG nova.network.neutron [-] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1463.932601] env[63241]: DEBUG nova.network.neutron [req-dc00000a-582a-4f54-8c1a-9b37a6f6bbc9 req-28d421ca-7f1f-49c4-927c-30ebd9a728b4 service nova] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Updated VIF entry in instance network info cache for port 70f92de2-a999-4655-b551-a67d4f67f289. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1463.932968] env[63241]: DEBUG nova.network.neutron [req-dc00000a-582a-4f54-8c1a-9b37a6f6bbc9 req-28d421ca-7f1f-49c4-927c-30ebd9a728b4 service nova] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Updating instance_info_cache with network_info: [{"id": "70f92de2-a999-4655-b551-a67d4f67f289", "address": "fa:16:3e:a5:eb:df", "network": {"id": "7755d62b-4569-44fe-b57c-56fd80b68fdb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-822364304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5feea1ae034391a60091ce5fa83f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70f92de2-a9", "ovs_interfaceid": "70f92de2-a999-4655-b551-a67d4f67f289", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1463.946243] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c3e789-e241-fdca-7791-1f87f4b1630b, 'name': SearchDatastore_Task, 'duration_secs': 0.01348} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.946886] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6147f0bc-23a2-4a68-a43d-f44ee7375a6b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.955071] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1463.955071] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f2a64f-35c4-45f9-2d6e-5b102284384e" [ 1463.955071] env[63241]: _type = "Task" [ 1463.955071] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.963541] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f2a64f-35c4-45f9-2d6e-5b102284384e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.003493] env[63241]: DEBUG oslo_vmware.api [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820139, 'name': PowerOffVM_Task, 'duration_secs': 0.215005} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.003493] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1464.003623] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1464.003844] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99da66d1-34de-4a45-825d-f9f4b9a56fb2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.008678] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1464.060282] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1464.218539] env[63241]: DEBUG nova.network.neutron [req-8c2520ac-104f-442d-b8a9-5c63043b28d5 req-6c514826-94cf-46b7-861c-f377472b6a2f service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updated VIF entry in instance network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1464.218911] env[63241]: DEBUG nova.network.neutron [req-8c2520ac-104f-442d-b8a9-5c63043b28d5 req-6c514826-94cf-46b7-861c-f377472b6a2f service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updating instance_info_cache with network_info: [{"id": "4e927ec6-c091-40ea-8276-55eb762b414d", "address": "fa:16:3e:13:6a:ad", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e927ec6-c0", "ovs_interfaceid": "4e927ec6-c091-40ea-8276-55eb762b414d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.230746] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1464.231447] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1464.231447] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Deleting the datastore file [datastore1] 78894fda-8309-430a-ab38-ce1a415d83d3 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1464.231447] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26b94169-8311-4da8-911c-6a45b763ff5d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.242310] env[63241]: DEBUG oslo_vmware.api [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] 
Waiting for the task: (returnval){ [ 1464.242310] env[63241]: value = "task-1820141" [ 1464.242310] env[63241]: _type = "Task" [ 1464.242310] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.256955] env[63241]: DEBUG oslo_vmware.api [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820141, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.366372] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1464.369461] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-23887252-cf1e-4795-91ad-0a4b479c02da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.378641] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1464.378641] env[63241]: value = "task-1820142" [ 1464.378641] env[63241]: _type = "Task" [ 1464.378641] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.391986] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820142, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.399561] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.436303] env[63241]: DEBUG oslo_concurrency.lockutils [req-dc00000a-582a-4f54-8c1a-9b37a6f6bbc9 req-28d421ca-7f1f-49c4-927c-30ebd9a728b4 service nova] Releasing lock "refresh_cache-c1c85cc0-53f1-4920-8f3e-6dd69414fa85" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.470171] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f2a64f-35c4-45f9-2d6e-5b102284384e, 'name': SearchDatastore_Task, 'duration_secs': 0.012565} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.473266] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.473589] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c1c85cc0-53f1-4920-8f3e-6dd69414fa85/c1c85cc0-53f1-4920-8f3e-6dd69414fa85.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1464.474205] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04f41235-78c0-43e7-beaa-412d648dacd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.482760] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1464.482760] env[63241]: value = "task-1820143" [ 1464.482760] env[63241]: _type = "Task" [ 1464.482760] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.499329] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.503970] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9711ebcc-69a2-4647-ac14-e55d8a351759 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.517529] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Doing hard reboot of VM {{(pid=63241) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1464.518496] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-eabc3268-553d-451c-a08a-607886002e3a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.539320] env[63241]: DEBUG oslo_vmware.api [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1464.539320] env[63241]: value = "task-1820144" [ 1464.539320] env[63241]: _type = "Task" [ 1464.539320] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.558066] env[63241]: DEBUG oslo_vmware.api [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820144, 'name': ResetVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.562092] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Updating instance_info_cache with network_info: [{"id": "6d8f94d0-98d7-4846-98e1-76924fe639d0", "address": "fa:16:3e:5f:aa:8b", "network": {"id": "7755d62b-4569-44fe-b57c-56fd80b68fdb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-822364304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5feea1ae034391a60091ce5fa83f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d8f94d0-98", "ovs_interfaceid": "6d8f94d0-98d7-4846-98e1-76924fe639d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.694028] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Successfully created port: e6a64330-a394-41bb-9270-490bd00a4bf4 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1464.711106] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209a304f-7406-4407-a091-e08f234ad7d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.723169] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7d0caf-e60c-42fa-aac1-d48f74f3bb43 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.728365] env[63241]: DEBUG oslo_concurrency.lockutils [req-8c2520ac-104f-442d-b8a9-5c63043b28d5 req-6c514826-94cf-46b7-861c-f377472b6a2f service nova] Releasing lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.767582] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aff6db6-b7f3-4ba0-aaff-31b75fb1fa02 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.776513] env[63241]: DEBUG nova.network.neutron [-] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.781508] env[63241]: DEBUG oslo_vmware.api [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17387} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.781508] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1464.781508] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1464.781508] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1464.781800] env[63241]: INFO nova.compute.manager [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1464.781929] env[63241]: DEBUG oslo.service.loopingcall [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1464.782218] env[63241]: DEBUG nova.compute.manager [-] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1464.782349] env[63241]: DEBUG nova.network.neutron [-] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1464.785483] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9f7ead-df2c-4b83-8809-4e32fffeddcc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.803483] env[63241]: DEBUG nova.compute.provider_tree [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.897364] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820142, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.997891] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820143, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.050501] env[63241]: DEBUG oslo_vmware.api [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820144, 'name': ResetVM_Task, 'duration_secs': 0.097485} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.050778] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Did hard reboot of VM {{(pid=63241) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1465.051348] env[63241]: DEBUG nova.compute.manager [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1465.051954] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21cadc5-bfdb-47eb-9e14-e9e32ad02353 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.075453] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Releasing lock "refresh_cache-81854e13-e0c1-43a9-8529-678d56d57bbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1465.075900] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Instance network_info: |[{"id": "6d8f94d0-98d7-4846-98e1-76924fe639d0", "address": "fa:16:3e:5f:aa:8b", "network": {"id": "7755d62b-4569-44fe-b57c-56fd80b68fdb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-822364304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5feea1ae034391a60091ce5fa83f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d8f94d0-98", "ovs_interfaceid": "6d8f94d0-98d7-4846-98e1-76924fe639d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1465.076345] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:aa:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '72781990-3cb3-42eb-9eb1-4040dedbf66f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d8f94d0-98d7-4846-98e1-76924fe639d0', 
'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1465.090993] env[63241]: DEBUG oslo.service.loopingcall [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1465.092473] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1465.094174] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1465.095633] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d7f2652-4d1a-4897-bdc4-60dd2d3ea149 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.122330] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1465.122330] env[63241]: value = "task-1820145" [ 1465.122330] env[63241]: _type = "Task" [ 1465.122330] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.128855] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1465.129148] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1465.129307] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1465.129507] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 
tempest-ListServersNegativeTestJSON-213631973-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1465.129661] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1465.129804] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1465.130300] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1465.130569] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1465.130826] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1465.130995] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1465.131245] env[63241]: DEBUG nova.virt.hardware [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1465.132161] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286e64b7-01e9-4d9b-9f2e-3d0f31b04b3a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.138028] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820145, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.143798] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d899f5-295f-46e0-9028-7b113d2f27a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.279247] env[63241]: INFO nova.compute.manager [-] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Took 1.38 seconds to deallocate network for instance. [ 1465.306748] env[63241]: DEBUG nova.scheduler.client.report [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1465.392225] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820142, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.494990] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563273} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.495286] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c1c85cc0-53f1-4920-8f3e-6dd69414fa85/c1c85cc0-53f1-4920-8f3e-6dd69414fa85.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1465.495503] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1465.495768] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d46f156-fec0-4b07-b90e-114a78f27ad0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.505900] env[63241]: DEBUG nova.compute.manager [req-612d32c3-68ad-490a-98ef-9e61624c676f req-43fc70d4-49ea-4f1a-9379-268505bb65a4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Received event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1465.506032] env[63241]: DEBUG nova.compute.manager [req-612d32c3-68ad-490a-98ef-9e61624c676f req-43fc70d4-49ea-4f1a-9379-268505bb65a4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing instance network info cache due to event network-changed-4e927ec6-c091-40ea-8276-55eb762b414d. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1465.506260] env[63241]: DEBUG oslo_concurrency.lockutils [req-612d32c3-68ad-490a-98ef-9e61624c676f req-43fc70d4-49ea-4f1a-9379-268505bb65a4 service nova] Acquiring lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.506406] env[63241]: DEBUG oslo_concurrency.lockutils [req-612d32c3-68ad-490a-98ef-9e61624c676f req-43fc70d4-49ea-4f1a-9379-268505bb65a4 service nova] Acquired lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.506604] env[63241]: DEBUG nova.network.neutron [req-612d32c3-68ad-490a-98ef-9e61624c676f req-43fc70d4-49ea-4f1a-9379-268505bb65a4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Refreshing network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1465.511038] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1465.511038] env[63241]: value = "task-1820146" [ 1465.511038] env[63241]: _type = "Task" [ 1465.511038] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.518847] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820146, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.532464] env[63241]: DEBUG nova.compute.manager [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Received event network-vif-plugged-6d8f94d0-98d7-4846-98e1-76924fe639d0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1465.532692] env[63241]: DEBUG oslo_concurrency.lockutils [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] Acquiring lock "81854e13-e0c1-43a9-8529-678d56d57bbf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.532903] env[63241]: DEBUG oslo_concurrency.lockutils [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] Lock "81854e13-e0c1-43a9-8529-678d56d57bbf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.533084] env[63241]: DEBUG oslo_concurrency.lockutils [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] Lock "81854e13-e0c1-43a9-8529-678d56d57bbf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.533409] env[63241]: DEBUG nova.compute.manager [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] No waiting events found dispatching network-vif-plugged-6d8f94d0-98d7-4846-98e1-76924fe639d0 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1465.533409] env[63241]: WARNING nova.compute.manager [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Received unexpected event network-vif-plugged-6d8f94d0-98d7-4846-98e1-76924fe639d0 for instance with vm_state building and task_state spawning. 
[ 1465.533563] env[63241]: DEBUG nova.compute.manager [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Received event network-vif-deleted-cae6ea4d-1012-49d8-b413-e047b6b16de9 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1465.533727] env[63241]: DEBUG nova.compute.manager [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Received event network-changed-6d8f94d0-98d7-4846-98e1-76924fe639d0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1465.533951] env[63241]: DEBUG nova.compute.manager [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Refreshing instance network info cache due to event network-changed-6d8f94d0-98d7-4846-98e1-76924fe639d0. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1465.534403] env[63241]: DEBUG oslo_concurrency.lockutils [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] Acquiring lock "refresh_cache-81854e13-e0c1-43a9-8529-678d56d57bbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.534588] env[63241]: DEBUG oslo_concurrency.lockutils [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] Acquired lock "refresh_cache-81854e13-e0c1-43a9-8529-678d56d57bbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.534753] env[63241]: DEBUG nova.network.neutron [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Refreshing network info cache for port 6d8f94d0-98d7-4846-98e1-76924fe639d0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1465.571270] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b21872ca-50eb-436b-94a7-f29b1fe2771b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "343a7e90-5e55-4125-8475-44050f267987" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.711s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.638010] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820145, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.790627] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.812517] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.767s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.813184] env[63241]: DEBUG nova.compute.manager [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1465.816163] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.758s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.816460] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.822337] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.053s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.824550] env[63241]: INFO nova.compute.claims [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1465.878101] env[63241]: INFO nova.scheduler.client.report [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Deleted allocations for instance f1c19f17-ce7c-481a-99fd-d0bb20f1520b [ 1465.894751] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820142, 'name': CloneVM_Task, 'duration_secs': 1.449213} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.895046] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Created linked-clone VM from snapshot [ 1465.895805] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c70abd8-7ec7-412a-9743-0d3cf243c5b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.904868] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Uploading image f39873a9-6d8a-4dbc-a330-041096a7e4cf {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1465.933712] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1465.933712] env[63241]: value = "vm-377036" [ 1465.933712] env[63241]: _type = "VirtualMachine" [ 1465.933712] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1465.934341] env[63241]: DEBUG oslo_concurrency.lockutils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "2b1805b3-2e03-410f-8222-64b8542d4a43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.934341] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-50ebbf48-d8db-4502-b42b-2e82fdb8b861 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.943203] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lease: (returnval){ [ 1465.943203] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b3d757-1625-2cfb-df53-09fe04e8fc9b" [ 1465.943203] env[63241]: _type = "HttpNfcLease" [ 1465.943203] env[63241]: } obtained for exporting VM: (result){ [ 1465.943203] env[63241]: value = "vm-377036" [ 1465.943203] env[63241]: _type = "VirtualMachine" [ 1465.943203] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1465.943203] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the lease: (returnval){ [ 1465.943203] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b3d757-1625-2cfb-df53-09fe04e8fc9b" [ 1465.943203] env[63241]: _type = "HttpNfcLease" [ 1465.943203] env[63241]: } to be ready. 
{{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1465.949451] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1465.949451] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b3d757-1625-2cfb-df53-09fe04e8fc9b" [ 1465.949451] env[63241]: _type = "HttpNfcLease" [ 1465.949451] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1466.023908] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820146, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140334} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.024504] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1466.026658] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d893ae-027a-428b-9eb1-c30ae9537375 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.054281] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] c1c85cc0-53f1-4920-8f3e-6dd69414fa85/c1c85cc0-53f1-4920-8f3e-6dd69414fa85.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1466.056594] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17dc3773-86e8-4890-b781-31eddb7f1a4c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.076229] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1466.076229] env[63241]: value = "task-1820148" [ 1466.076229] env[63241]: _type = "Task" [ 1466.076229] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.084718] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.132957] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820145, 'name': CreateVM_Task, 'duration_secs': 0.543202} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.133163] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1466.133847] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.134013] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.134411] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1466.134677] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d7fd28b-9f15-42ce-a418-bbca5b8f60c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.139760] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1466.139760] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a3ad26-6d49-97b7-2dae-557a7761e7f5" [ 1466.139760] env[63241]: _type = "Task" [ 1466.139760] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.151544] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a3ad26-6d49-97b7-2dae-557a7761e7f5, 'name': SearchDatastore_Task, 'duration_secs': 0.009638} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.151817] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.152056] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1466.152286] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.152432] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.152606] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1466.152850] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43382901-c997-4894-8843-1549fce808c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.160341] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1466.160511] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1466.161226] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd568e4b-3845-47f3-a362-76f553e95c6c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.166079] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1466.166079] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5252e22e-5359-511b-28ec-cd15da927910" [ 1466.166079] env[63241]: _type = "Task" [ 1466.166079] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.176194] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5252e22e-5359-511b-28ec-cd15da927910, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.334411] env[63241]: DEBUG nova.compute.utils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1466.339848] env[63241]: DEBUG nova.compute.manager [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1466.340144] env[63241]: DEBUG nova.network.neutron [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1466.390309] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d7d47064-75aa-441c-b1be-54538b99c7aa tempest-DeleteServersAdminTestJSON-108135671 tempest-DeleteServersAdminTestJSON-108135671-project-member] Lock "f1c19f17-ce7c-481a-99fd-d0bb20f1520b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.470s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.457658] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1466.457658] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b3d757-1625-2cfb-df53-09fe04e8fc9b" [ 1466.457658] env[63241]: _type = "HttpNfcLease" [ 1466.457658] env[63241]: } is ready. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1466.458253] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1466.458253] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b3d757-1625-2cfb-df53-09fe04e8fc9b" [ 1466.458253] env[63241]: _type = "HttpNfcLease" [ 1466.458253] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1466.460073] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dbefcd6-2dac-4ed9-80f0-f0785d25e823 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.468576] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522507da-746e-94dc-155d-25b8344394e5/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1466.468576] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522507da-746e-94dc-155d-25b8344394e5/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1466.531563] env[63241]: DEBUG nova.policy [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f691b52644488c832ce1224a079218', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e48fc59560ab47ae87be73ab11b13e7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1466.569013] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d22b3b6a-df92-4b37-9666-858c9feedf70 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.588494] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820148, 'name': ReconfigVM_Task, 'duration_secs': 0.280673} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.588824] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Reconfigured VM instance instance-00000023 to attach disk [datastore1] c1c85cc0-53f1-4920-8f3e-6dd69414fa85/c1c85cc0-53f1-4920-8f3e-6dd69414fa85.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1466.589482] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd65b9cb-d434-4cdc-a22a-95cdf6f81ced {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.602041] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1466.602041] env[63241]: value = "task-1820149" [ 1466.602041] env[63241]: _type = "Task" [ 1466.602041] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.616430] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820149, 'name': Rename_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.685129] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5252e22e-5359-511b-28ec-cd15da927910, 'name': SearchDatastore_Task, 'duration_secs': 0.0089} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.686688] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-119ac8fd-42d9-4cfc-b57f-255891e5ecde {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.693824] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1466.693824] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52189329-8afe-a04b-1a15-2de3d4b9e907" [ 1466.693824] env[63241]: _type = "Task" [ 1466.693824] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.704623] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52189329-8afe-a04b-1a15-2de3d4b9e907, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.720863] env[63241]: DEBUG nova.network.neutron [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Updated VIF entry in instance network info cache for port 6d8f94d0-98d7-4846-98e1-76924fe639d0. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1466.721403] env[63241]: DEBUG nova.network.neutron [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Updating instance_info_cache with network_info: [{"id": "6d8f94d0-98d7-4846-98e1-76924fe639d0", "address": "fa:16:3e:5f:aa:8b", "network": {"id": "7755d62b-4569-44fe-b57c-56fd80b68fdb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-822364304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5feea1ae034391a60091ce5fa83f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d8f94d0-98", "ovs_interfaceid": "6d8f94d0-98d7-4846-98e1-76924fe639d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.725122] env[63241]: DEBUG nova.network.neutron [req-612d32c3-68ad-490a-98ef-9e61624c676f req-43fc70d4-49ea-4f1a-9379-268505bb65a4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updated VIF entry in instance network info cache for port 4e927ec6-c091-40ea-8276-55eb762b414d. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1466.725390] env[63241]: DEBUG nova.network.neutron [req-612d32c3-68ad-490a-98ef-9e61624c676f req-43fc70d4-49ea-4f1a-9379-268505bb65a4 service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updating instance_info_cache with network_info: [{"id": "4e927ec6-c091-40ea-8276-55eb762b414d", "address": "fa:16:3e:13:6a:ad", "network": {"id": "26830501-b055-4be0-bd83-768b6e7a6148", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1917224215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b8ae8ca872a549a6918b0e060d4b4af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e927ec6-c0", "ovs_interfaceid": "4e927ec6-c091-40ea-8276-55eb762b414d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.843408] env[63241]: DEBUG nova.compute.manager [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1466.903469] env[63241]: DEBUG nova.network.neutron [-] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.091701] env[63241]: DEBUG nova.network.neutron [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Successfully created port: 6c7369d6-fc45-46eb-8603-bc0f7c519f03 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1467.119789] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820149, 'name': Rename_Task, 'duration_secs': 0.154008} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.122805] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1467.123100] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae8a98e5-6cf7-4c2c-8cdd-ce990c6da66f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.138245] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1467.138245] env[63241]: value = "task-1820150" [ 1467.138245] env[63241]: _type = "Task" [ 1467.138245] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.147223] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.207748] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52189329-8afe-a04b-1a15-2de3d4b9e907, 'name': SearchDatastore_Task, 'duration_secs': 0.010465} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.207927] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1467.208228] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 81854e13-e0c1-43a9-8529-678d56d57bbf/81854e13-e0c1-43a9-8529-678d56d57bbf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1467.208775] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd771490-920c-40d5-a68d-d5098a40d7a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.223359] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1467.223359] env[63241]: value = "task-1820151" [ 1467.223359] env[63241]: _type = "Task" [ 1467.223359] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.225208] env[63241]: DEBUG oslo_concurrency.lockutils [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] Releasing lock "refresh_cache-81854e13-e0c1-43a9-8529-678d56d57bbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1467.225208] env[63241]: DEBUG nova.compute.manager [req-4e9de8b2-6ee7-458f-86d9-a3450d793bf5 req-0051464e-93e6-499f-a2b2-d04d69afe2a9 service nova] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Received event network-vif-deleted-b508f7cc-ab52-4957-9c5e-500a7f0b2335 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1467.228430] env[63241]: DEBUG oslo_concurrency.lockutils [req-612d32c3-68ad-490a-98ef-9e61624c676f req-43fc70d4-49ea-4f1a-9379-268505bb65a4 service nova] Releasing lock "refresh_cache-0c72c98b-57f0-44e5-9159-490b27eac3a6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1467.236735] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820151, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.263632] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Successfully updated port: e6a64330-a394-41bb-9270-490bd00a4bf4 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1467.412992] env[63241]: INFO nova.compute.manager [-] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Took 2.63 seconds to deallocate network for instance. [ 1467.579439] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6b9537-f6b9-485c-b6cd-f9bad90980fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.593196] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ff23a8-56ea-49c8-93b2-f20552dd2e98 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.646431] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb515d26-016e-4540-b05b-e6a055128ed4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.651269] env[63241]: DEBUG nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received event network-vif-deleted-d73b4859-4b46-4a03-b251-fc28eaadbec7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1467.651978] env[63241]: DEBUG nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received event network-vif-deleted-76a97e3a-997e-45eb-9aee-2696857c9aaf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1467.651978] env[63241]: DEBUG nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Received event network-vif-deleted-4375bdb7-133a-4c01-8ad3-b93007c0de99 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1467.651978] env[63241]: DEBUG nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Received event network-changed-c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1467.651978] env[63241]: DEBUG nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Refreshing instance network info cache due to event network-changed-c69d6232-5a3b-404e-b2ce-6724865adf54. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1467.652553] env[63241]: DEBUG oslo_concurrency.lockutils [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] Acquiring lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.652553] env[63241]: DEBUG oslo_concurrency.lockutils [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] Acquired lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.652553] env[63241]: DEBUG nova.network.neutron [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Refreshing network info cache for port c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1467.669440] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820150, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.671248] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569f6053-3f15-473c-93ad-264d292f804a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.690151] env[63241]: DEBUG nova.compute.provider_tree [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1467.740267] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820151, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.770546] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "refresh_cache-bef91c1c-a418-4464-ae7b-883ffb7e9695" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.770546] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired lock "refresh_cache-bef91c1c-a418-4464-ae7b-883ffb7e9695" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.770546] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1467.791342] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "343a7e90-5e55-4125-8475-44050f267987" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.791342] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "343a7e90-5e55-4125-8475-44050f267987" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.791342] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "343a7e90-5e55-4125-8475-44050f267987-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.791342] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "343a7e90-5e55-4125-8475-44050f267987-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.791342] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "343a7e90-5e55-4125-8475-44050f267987-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.798605] env[63241]: INFO nova.compute.manager [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Terminating instance [ 1467.805593] env[63241]: DEBUG nova.compute.manager [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1467.805848] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1467.807412] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd96431-320c-406e-a518-8ed4538286b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.816807] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1467.817406] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d2a5880-7607-4396-a142-f7dc50d98779 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.825087] env[63241]: DEBUG oslo_vmware.api [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1467.825087] env[63241]: value = "task-1820152" [ 1467.825087] env[63241]: _type = "Task" [ 1467.825087] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.836403] env[63241]: DEBUG oslo_vmware.api [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820152, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.859132] env[63241]: DEBUG nova.compute.manager [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1467.890779] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1467.891158] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1467.891983] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1467.891983] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1467.892250] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1467.892520] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1467.892946] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1467.893266] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1467.893674] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 
tempest-ImagesTestJSON-2007080518-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1467.893990] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1467.894902] env[63241]: DEBUG nova.virt.hardware [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1467.896029] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac235a60-f1fd-4880-91da-6d4a2d98c9c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.909227] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c605104-0744-4ed2-8e8c-02fc553af367 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.937156] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.152905] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820150, 'name': PowerOnVM_Task, 'duration_secs': 0.951889} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.156019] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1468.156019] env[63241]: INFO nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Took 8.48 seconds to spawn the instance on the hypervisor. 
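The PowerOnVM_Task records above follow oslo.vmware's usual invoke-then-poll pattern: the driver issues the asynchronous vSphere call, then the session polls the returned Task (the "_poll_task ... progress is N%" records) until it logs "completed successfully". The following is a minimal sketch of that pattern, not code taken from this log; it assumes an already-created oslo_vmware.api.VMwareAPISession and a VirtualMachine managed object reference obtained elsewhere, and uses only the invoke_api/wait_for_task calls that the records above reference.

def power_on_and_wait(session, vm_ref):
    """Start PowerOnVM_Task and block until the poll loop reports success.

    `session` and `vm_ref` are hypothetical stand-ins, not values from
    this log.
    """
    # invoke_api issues the SOAP request ("Invoking
    # VirtualMachine.PowerOnVM_Task" in the records above) and returns the
    # Task managed object immediately.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # wait_for_task drives the polling loop seen as the _poll_task records
    # ("progress is N%", then "completed successfully") and returns the
    # completed task info, raising if the task ends in an error state.
    return session.wait_for_task(task)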
[ 1468.156019] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1468.156019] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de625b2-f7a6-47bf-ba4c-5b652bcd3df5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.193588] env[63241]: DEBUG nova.scheduler.client.report [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1468.237278] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820151, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.645822} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.237827] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 81854e13-e0c1-43a9-8529-678d56d57bbf/81854e13-e0c1-43a9-8529-678d56d57bbf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1468.237948] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1468.238207] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-041a6977-15ad-4f2c-95f5-31327628d268 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.246226] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1468.246226] env[63241]: value = "task-1820153" [ 1468.246226] env[63241]: _type = "Task" [ 1468.246226] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.254989] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820153, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.320867] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1468.336858] env[63241]: DEBUG oslo_vmware.api [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820152, 'name': PowerOffVM_Task, 'duration_secs': 0.254873} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.337291] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1468.337493] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1468.337831] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f2db9b5-10c5-40b8-9068-4cad7f09ebd4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.483259] env[63241]: DEBUG nova.network.neutron [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Updating instance_info_cache with network_info: [{"id": "e6a64330-a394-41bb-9270-490bd00a4bf4", "address": "fa:16:3e:b9:35:5f", "network": {"id": "7755d62b-4569-44fe-b57c-56fd80b68fdb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-822364304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5feea1ae034391a60091ce5fa83f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a64330-a3", "ovs_interfaceid": 
"e6a64330-a394-41bb-9270-490bd00a4bf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.591162] env[63241]: DEBUG nova.network.neutron [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Updated VIF entry in instance network info cache for port c69d6232-5a3b-404e-b2ce-6724865adf54. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1468.592079] env[63241]: DEBUG nova.network.neutron [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Updating instance_info_cache with network_info: [{"id": "c69d6232-5a3b-404e-b2ce-6724865adf54", "address": "fa:16:3e:2e:de:23", "network": {"id": "7f61cd58-880b-488b-8ae6-bc6d0b2a3806", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1752925058-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1a99e9ac8f4fc0bbd763a9d91321af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c118a9ee-84f7-4f09-8a21-05600ed3cc06", "external-id": "nsx-vlan-transportzone-274", "segmentation_id": 274, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc69d6232-5a", "ovs_interfaceid": "c69d6232-5a3b-404e-b2ce-6724865adf54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.680701] env[63241]: INFO nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Took 43.75 seconds to build instance. [ 1468.699016] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.877s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.699605] env[63241]: DEBUG nova.compute.manager [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1468.704555] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.646s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.706132] env[63241]: INFO nova.compute.claims [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1468.759901] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820153, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.204012} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.759981] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1468.760983] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fdf7b4-99f7-4485-86e7-b14655f6033f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.790942] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 81854e13-e0c1-43a9-8529-678d56d57bbf/81854e13-e0c1-43a9-8529-678d56d57bbf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1468.792074] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b5a00d8-caae-4852-b5a2-9bc29672fcd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.816834] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1468.816834] env[63241]: value = "task-1820155" [ 1468.816834] env[63241]: _type = "Task" [ 1468.816834] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.824980] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820155, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.987027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Releasing lock "refresh_cache-bef91c1c-a418-4464-ae7b-883ffb7e9695" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.987438] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Instance network_info: |[{"id": "e6a64330-a394-41bb-9270-490bd00a4bf4", "address": "fa:16:3e:b9:35:5f", "network": {"id": "7755d62b-4569-44fe-b57c-56fd80b68fdb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-822364304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5feea1ae034391a60091ce5fa83f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a64330-a3", "ovs_interfaceid": "e6a64330-a394-41bb-9270-490bd00a4bf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1468.987659] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:35:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '72781990-3cb3-42eb-9eb1-4040dedbf66f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6a64330-a394-41bb-9270-490bd00a4bf4', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1469.002983] env[63241]: DEBUG oslo.service.loopingcall [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1469.003144] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1469.003780] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b03b86da-7b5b-4261-9222-28233da239a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.034277] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1469.034277] env[63241]: value = "task-1820156" [ 1469.034277] env[63241]: _type = "Task" [ 1469.034277] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.048280] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820156, 'name': CreateVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.062644] env[63241]: DEBUG nova.compute.manager [req-53d91826-e32d-497b-852b-43570da2a9de req-eb4b1590-f459-4111-a10c-d324720587c6 service nova] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Received event network-vif-plugged-6c7369d6-fc45-46eb-8603-bc0f7c519f03 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1469.063019] env[63241]: DEBUG oslo_concurrency.lockutils [req-53d91826-e32d-497b-852b-43570da2a9de req-eb4b1590-f459-4111-a10c-d324720587c6 service nova] Acquiring lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.064205] env[63241]: DEBUG oslo_concurrency.lockutils [req-53d91826-e32d-497b-852b-43570da2a9de req-eb4b1590-f459-4111-a10c-d324720587c6 service nova] Lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.067200] env[63241]: DEBUG oslo_concurrency.lockutils [req-53d91826-e32d-497b-852b-43570da2a9de req-eb4b1590-f459-4111-a10c-d324720587c6 service nova] Lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.067801] env[63241]: DEBUG nova.compute.manager [req-53d91826-e32d-497b-852b-43570da2a9de req-eb4b1590-f459-4111-a10c-d324720587c6 service nova] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] No waiting events found dispatching network-vif-plugged-6c7369d6-fc45-46eb-8603-bc0f7c519f03 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1469.068111] env[63241]: WARNING nova.compute.manager [req-53d91826-e32d-497b-852b-43570da2a9de req-eb4b1590-f459-4111-a10c-d324720587c6 service nova] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Received unexpected event network-vif-plugged-6c7369d6-fc45-46eb-8603-bc0f7c519f03 for instance with vm_state building and task_state spawning. 
[ 1469.094495] env[63241]: DEBUG oslo_concurrency.lockutils [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] Releasing lock "refresh_cache-343a7e90-5e55-4125-8475-44050f267987" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1469.095629] env[63241]: DEBUG nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Received event network-vif-plugged-e6a64330-a394-41bb-9270-490bd00a4bf4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1469.095881] env[63241]: DEBUG oslo_concurrency.lockutils [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] Acquiring lock "bef91c1c-a418-4464-ae7b-883ffb7e9695-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.096419] env[63241]: DEBUG oslo_concurrency.lockutils [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] Lock "bef91c1c-a418-4464-ae7b-883ffb7e9695-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.096419] env[63241]: DEBUG oslo_concurrency.lockutils [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] Lock "bef91c1c-a418-4464-ae7b-883ffb7e9695-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.096591] env[63241]: DEBUG nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] No waiting events found dispatching network-vif-plugged-e6a64330-a394-41bb-9270-490bd00a4bf4 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1469.096693] env[63241]: WARNING nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Received unexpected event network-vif-plugged-e6a64330-a394-41bb-9270-490bd00a4bf4 for instance with vm_state building and task_state spawning. [ 1469.096929] env[63241]: DEBUG nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Received event network-changed-e6a64330-a394-41bb-9270-490bd00a4bf4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1469.097167] env[63241]: DEBUG nova.compute.manager [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Refreshing instance network info cache due to event network-changed-e6a64330-a394-41bb-9270-490bd00a4bf4. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1469.097639] env[63241]: DEBUG oslo_concurrency.lockutils [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] Acquiring lock "refresh_cache-bef91c1c-a418-4464-ae7b-883ffb7e9695" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.097639] env[63241]: DEBUG oslo_concurrency.lockutils [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] Acquired lock "refresh_cache-bef91c1c-a418-4464-ae7b-883ffb7e9695" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.097918] env[63241]: DEBUG nova.network.neutron [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Refreshing network info cache for port e6a64330-a394-41bb-9270-490bd00a4bf4 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1469.121504] env[63241]: DEBUG nova.network.neutron [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Successfully updated port: 6c7369d6-fc45-46eb-8603-bc0f7c519f03 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1469.183366] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.644s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.207026] env[63241]: DEBUG nova.compute.utils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1469.210261] env[63241]: DEBUG nova.compute.manager [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1469.210261] env[63241]: DEBUG nova.network.neutron [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1469.274349] env[63241]: DEBUG nova.policy [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '254b3a0b92e9481a8fdce01f766369dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd734ba31d184477f9d6cbb2bc3f63706', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1469.329028] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820155, 'name': ReconfigVM_Task, 'duration_secs': 0.321567} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.329028] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 81854e13-e0c1-43a9-8529-678d56d57bbf/81854e13-e0c1-43a9-8529-678d56d57bbf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1469.329028] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56eb23e1-27e7-473b-8a61-bbeb7556cf8c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.334967] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1469.334967] env[63241]: value = "task-1820157" [ 1469.334967] env[63241]: _type = "Task" [ 1469.334967] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.347634] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820157, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.357941] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1469.358328] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1469.358524] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Deleting the datastore file [datastore1] 343a7e90-5e55-4125-8475-44050f267987 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1469.358923] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2ff7c14-57d7-4be8-8823-60836c1f3842 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.366651] env[63241]: DEBUG oslo_vmware.api [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1469.366651] env[63241]: value = "task-1820158" [ 1469.366651] env[63241]: _type = "Task" [ 1469.366651] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.375338] env[63241]: DEBUG oslo_vmware.api [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820158, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.545532] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820156, 'name': CreateVM_Task, 'duration_secs': 0.467326} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.545532] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1469.546389] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.547128] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.547128] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1469.547320] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d929b77-93f5-41a8-91fe-0bb98cac5b16 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.553454] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1469.553454] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520ad70e-c5a9-47c8-da23-0a9b87419083" [ 1469.553454] env[63241]: _type = "Task" [ 1469.553454] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.562725] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520ad70e-c5a9-47c8-da23-0a9b87419083, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.622546] env[63241]: DEBUG nova.network.neutron [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Successfully created port: bd89b471-e155-45fc-9b21-40bc75f8f48a {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1469.625774] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "refresh_cache-efbe39fa-d581-41ac-b51c-9c94c9839d7a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.625774] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "refresh_cache-efbe39fa-d581-41ac-b51c-9c94c9839d7a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.627370] env[63241]: DEBUG nova.network.neutron [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1469.687026] env[63241]: DEBUG nova.compute.manager [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1469.724043] env[63241]: DEBUG nova.compute.manager [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1469.854156] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820157, 'name': Rename_Task, 'duration_secs': 0.152618} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.854453] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1469.854696] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cfe9e928-5fc8-4f80-b222-2c0c356956b8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.875062] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1469.875062] env[63241]: value = "task-1820159" [ 1469.875062] env[63241]: _type = "Task" [ 1469.875062] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.882643] env[63241]: DEBUG oslo_vmware.api [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266194} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.882901] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1469.883101] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1469.883278] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1469.883471] env[63241]: INFO nova.compute.manager [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: 343a7e90-5e55-4125-8475-44050f267987] Took 2.08 seconds to destroy the instance on the hypervisor. [ 1469.883707] env[63241]: DEBUG oslo.service.loopingcall [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1469.883914] env[63241]: DEBUG nova.compute.manager [-] [instance: 343a7e90-5e55-4125-8475-44050f267987] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1469.884459] env[63241]: DEBUG nova.network.neutron [-] [instance: 343a7e90-5e55-4125-8475-44050f267987] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1469.889778] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820159, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.015101] env[63241]: DEBUG nova.network.neutron [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Updated VIF entry in instance network info cache for port e6a64330-a394-41bb-9270-490bd00a4bf4. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1470.015482] env[63241]: DEBUG nova.network.neutron [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Updating instance_info_cache with network_info: [{"id": "e6a64330-a394-41bb-9270-490bd00a4bf4", "address": "fa:16:3e:b9:35:5f", "network": {"id": "7755d62b-4569-44fe-b57c-56fd80b68fdb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-822364304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b5feea1ae034391a60091ce5fa83f16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6a64330-a3", "ovs_interfaceid": "e6a64330-a394-41bb-9270-490bd00a4bf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.066672] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520ad70e-c5a9-47c8-da23-0a9b87419083, 'name': SearchDatastore_Task, 'duration_secs': 0.016685} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.066970] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.067358] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1470.067640] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.067878] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.068240] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1470.071261] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b67f65c-72cb-4633-99d9-7aaa4c5b74c1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.084357] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1470.084597] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1470.088212] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31582dbf-cdd2-4a4c-9854-5a1909619415 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.094491] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1470.094491] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5240ac44-d78d-9b90-e4a1-0affbe4819f1" [ 1470.094491] env[63241]: _type = "Task" [ 1470.094491] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.103146] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5240ac44-d78d-9b90-e4a1-0affbe4819f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.184172] env[63241]: DEBUG nova.network.neutron [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1470.217425] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.300974] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "44508cc6-c576-4c30-8559-75118ceba02a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.301254] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "44508cc6-c576-4c30-8559-75118ceba02a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.301452] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "44508cc6-c576-4c30-8559-75118ceba02a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.301628] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "44508cc6-c576-4c30-8559-75118ceba02a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.301785] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "44508cc6-c576-4c30-8559-75118ceba02a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.304046] env[63241]: INFO nova.compute.manager [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Terminating instance [ 1470.307569] env[63241]: DEBUG nova.compute.manager [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1470.307837] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1470.309025] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451ebde3-a7fe-48ad-91f2-776eabffa18d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.317896] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1470.320700] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8649dc8d-eb29-4642-bdb8-738c6af6330b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.327989] env[63241]: DEBUG oslo_vmware.api [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1470.327989] env[63241]: value = "task-1820160" [ 1470.327989] env[63241]: _type = "Task" [ 1470.327989] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.343849] env[63241]: DEBUG oslo_vmware.api [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820160, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.359041] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51080a7a-bf3b-4f49-8374-f51eff4051cc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.367576] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1c09c8-6c5b-4f54-9d4c-6d19b8314ee8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.407686] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99d5dc1-0ca2-44d9-bf47-c8e00968ac04 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.415907] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820159, 'name': PowerOnVM_Task, 'duration_secs': 0.537042} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.418175] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1470.418499] env[63241]: INFO nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Took 8.05 seconds to spawn the instance on the hypervisor. 
[ 1470.418572] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1470.419393] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53873cb-8663-4624-b411-7a5962a47b2e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.422981] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24b8eea-880c-4cf4-9b98-0fda32c442d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.442334] env[63241]: DEBUG nova.compute.provider_tree [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.473261] env[63241]: DEBUG nova.network.neutron [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Updating instance_info_cache with network_info: [{"id": "6c7369d6-fc45-46eb-8603-bc0f7c519f03", "address": "fa:16:3e:94:d1:0c", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c7369d6-fc", "ovs_interfaceid": "6c7369d6-fc45-46eb-8603-bc0f7c519f03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.522029] env[63241]: DEBUG oslo_concurrency.lockutils [req-eba7f31f-6626-4d19-ad69-0222d54dd713 req-fd87e008-8311-45a4-bb77-07a90f48cc05 service nova] Releasing lock "refresh_cache-bef91c1c-a418-4464-ae7b-883ffb7e9695" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.605598] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5240ac44-d78d-9b90-e4a1-0affbe4819f1, 'name': SearchDatastore_Task, 'duration_secs': 0.018744} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.606577] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e796622a-6689-4f43-8ba4-63a948894c8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.612138] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1470.612138] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c419ef-4399-f891-b4ca-b843f1bd71ce" [ 1470.612138] env[63241]: _type = "Task" [ 1470.612138] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.620810] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c419ef-4399-f891-b4ca-b843f1bd71ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.753026] env[63241]: DEBUG nova.compute.manager [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1470.765886] env[63241]: DEBUG nova.network.neutron [-] [instance: 343a7e90-5e55-4125-8475-44050f267987] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.791395] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1470.791539] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1470.791624] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 
tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1470.791822] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1470.791985] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1470.792244] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1470.792409] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1470.792570] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1470.792742] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1470.792904] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1470.793115] env[63241]: DEBUG nova.virt.hardware [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1470.794322] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d589c5-68af-4d42-a478-f405bb1e54fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.803034] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2caccac4-78e0-4551-9145-daa31cdd2e08 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.838216] env[63241]: DEBUG oslo_vmware.api [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820160, 'name': PowerOffVM_Task, 'duration_secs': 0.21086} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.838216] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1470.838379] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1470.838495] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72ffcec2-6db4-4dd6-aa2f-95b174a466e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.941401] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1470.941878] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1470.941980] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Deleting the datastore file [datastore1] 44508cc6-c576-4c30-8559-75118ceba02a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1470.942279] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a7d2d3b-710e-4cc6-8d11-48b3662ecb6c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.949701] env[63241]: DEBUG nova.scheduler.client.report [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1470.957572] env[63241]: INFO nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Took 43.06 seconds to build instance. [ 1470.960593] env[63241]: DEBUG oslo_vmware.api [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1470.960593] env[63241]: value = "task-1820162" [ 1470.960593] env[63241]: _type = "Task" [ 1470.960593] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.969058] env[63241]: DEBUG oslo_vmware.api [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820162, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.975577] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "refresh_cache-efbe39fa-d581-41ac-b51c-9c94c9839d7a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.975871] env[63241]: DEBUG nova.compute.manager [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Instance network_info: |[{"id": "6c7369d6-fc45-46eb-8603-bc0f7c519f03", "address": "fa:16:3e:94:d1:0c", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c7369d6-fc", "ovs_interfaceid": "6c7369d6-fc45-46eb-8603-bc0f7c519f03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1470.976266] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:d1:0c', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c7369d6-fc45-46eb-8603-bc0f7c519f03', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1470.984689] env[63241]: DEBUG oslo.service.loopingcall [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1470.985513] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1470.985744] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcc9fd96-7468-44ae-bfef-711196d8f215 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.008534] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1471.008534] env[63241]: value = "task-1820163" [ 1471.008534] env[63241]: _type = "Task" [ 1471.008534] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.017237] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820163, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.122693] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c419ef-4399-f891-b4ca-b843f1bd71ce, 'name': SearchDatastore_Task, 'duration_secs': 0.015022} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.124149] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.124410] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] bef91c1c-a418-4464-ae7b-883ffb7e9695/bef91c1c-a418-4464-ae7b-883ffb7e9695.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1471.127166] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae24153d-bc7b-4336-b9c9-fae1c3b1408e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.129752] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.130343] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.135763] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1471.135763] env[63241]: value = "task-1820164" [ 1471.135763] env[63241]: _type = "Task" [ 1471.135763] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.146174] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.272126] env[63241]: INFO nova.compute.manager [-] [instance: 343a7e90-5e55-4125-8475-44050f267987] Took 1.39 seconds to deallocate network for instance. 
[ 1471.386664] env[63241]: DEBUG nova.network.neutron [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Successfully updated port: bd89b471-e155-45fc-9b21-40bc75f8f48a {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1471.412994] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.413328] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.456238] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.752s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.457490] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.452s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.459793] env[63241]: INFO nova.compute.claims [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1471.464500] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "81854e13-e0c1-43a9-8529-678d56d57bbf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.863s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.474333] env[63241]: DEBUG oslo_vmware.api [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820162, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308941} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.477690] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1471.477690] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1471.477690] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1471.477690] env[63241]: INFO nova.compute.manager [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1471.477690] env[63241]: DEBUG oslo.service.loopingcall [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1471.477690] env[63241]: DEBUG nova.compute.manager [-] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1471.477690] env[63241]: DEBUG nova.network.neutron [-] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1471.527496] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820163, 'name': CreateVM_Task, 'duration_secs': 0.447817} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.527809] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1471.528587] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.528805] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.529152] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1471.529467] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5214673d-49e9-41ce-a712-4e425b3b945f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.534916] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1471.534916] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523a3730-30fd-ed8e-35f3-8b42a647d35c" [ 1471.534916] env[63241]: _type = "Task" [ 1471.534916] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.545050] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523a3730-30fd-ed8e-35f3-8b42a647d35c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.646660] env[63241]: DEBUG nova.compute.manager [req-b0e0e84a-c502-4fdc-8fc1-d35752f702b2 req-5cab26f0-c2f7-467d-a625-47006607aebb service nova] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Received event network-changed-6c7369d6-fc45-46eb-8603-bc0f7c519f03 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1471.647114] env[63241]: DEBUG nova.compute.manager [req-b0e0e84a-c502-4fdc-8fc1-d35752f702b2 req-5cab26f0-c2f7-467d-a625-47006607aebb service nova] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Refreshing instance network info cache due to event network-changed-6c7369d6-fc45-46eb-8603-bc0f7c519f03. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1471.647694] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0e0e84a-c502-4fdc-8fc1-d35752f702b2 req-5cab26f0-c2f7-467d-a625-47006607aebb service nova] Acquiring lock "refresh_cache-efbe39fa-d581-41ac-b51c-9c94c9839d7a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.647990] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0e0e84a-c502-4fdc-8fc1-d35752f702b2 req-5cab26f0-c2f7-467d-a625-47006607aebb service nova] Acquired lock "refresh_cache-efbe39fa-d581-41ac-b51c-9c94c9839d7a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.648432] env[63241]: DEBUG nova.network.neutron [req-b0e0e84a-c502-4fdc-8fc1-d35752f702b2 req-5cab26f0-c2f7-467d-a625-47006607aebb service nova] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Refreshing network info cache for port 6c7369d6-fc45-46eb-8603-bc0f7c519f03 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1471.655409] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820164, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.781845] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.892092] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquiring lock "refresh_cache-72a11582-1fad-428a-bde1-e9d0b05731cd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.892092] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquired lock "refresh_cache-72a11582-1fad-428a-bde1-e9d0b05731cd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.892092] env[63241]: DEBUG nova.network.neutron [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1471.969336] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquiring lock "d9f91486-d543-43c8-929b-5f077c1320f5" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.969336] env[63241]: DEBUG 
oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "d9f91486-d543-43c8-929b-5f077c1320f5" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.969336] env[63241]: DEBUG nova.compute.manager [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1472.045799] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523a3730-30fd-ed8e-35f3-8b42a647d35c, 'name': SearchDatastore_Task, 'duration_secs': 0.056373} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.046588] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.046850] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1472.047333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1472.047333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.047475] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1472.048836] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21fa94de-3926-487e-8e64-cfd0ce56e6c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.059269] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1472.060105] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1472.060333] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b92c9a8d-d3c9-4ff8-9851-5f22901d8d37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.068354] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1472.068354] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5232ffcf-1f79-f9ce-4c9d-4ba7390b5aab" [ 1472.068354] env[63241]: _type = "Task" [ 1472.068354] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.076668] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5232ffcf-1f79-f9ce-4c9d-4ba7390b5aab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.148476] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747652} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.148755] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] bef91c1c-a418-4464-ae7b-883ffb7e9695/bef91c1c-a418-4464-ae7b-883ffb7e9695.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1472.148993] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1472.149259] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34ae3738-690d-4eca-b1f3-46217f0dd71a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.156524] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1472.156524] env[63241]: value = "task-1820165" [ 1472.156524] env[63241]: _type = "Task" [ 1472.156524] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.164996] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820165, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.385600] env[63241]: DEBUG nova.network.neutron [req-b0e0e84a-c502-4fdc-8fc1-d35752f702b2 req-5cab26f0-c2f7-467d-a625-47006607aebb service nova] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Updated VIF entry in instance network info cache for port 6c7369d6-fc45-46eb-8603-bc0f7c519f03. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1472.385600] env[63241]: DEBUG nova.network.neutron [req-b0e0e84a-c502-4fdc-8fc1-d35752f702b2 req-5cab26f0-c2f7-467d-a625-47006607aebb service nova] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Updating instance_info_cache with network_info: [{"id": "6c7369d6-fc45-46eb-8603-bc0f7c519f03", "address": "fa:16:3e:94:d1:0c", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c7369d6-fc", "ovs_interfaceid": "6c7369d6-fc45-46eb-8603-bc0f7c519f03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.428721] env[63241]: DEBUG nova.network.neutron [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1472.471507] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "d9f91486-d543-43c8-929b-5f077c1320f5" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1472.475083] env[63241]: DEBUG nova.compute.manager [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1472.507561] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.569144] env[63241]: DEBUG nova.network.neutron [-] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.575894] env[63241]: DEBUG nova.network.neutron [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Updating instance_info_cache with network_info: [{"id": "bd89b471-e155-45fc-9b21-40bc75f8f48a", "address": "fa:16:3e:6e:46:88", "network": {"id": "9e6ff769-6f81-4d69-9710-49158736659b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1337177874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d734ba31d184477f9d6cbb2bc3f63706", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd89b471-e1", "ovs_interfaceid": "bd89b471-e155-45fc-9b21-40bc75f8f48a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.583392] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5232ffcf-1f79-f9ce-4c9d-4ba7390b5aab, 'name': SearchDatastore_Task, 'duration_secs': 0.014337} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.584384] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac8acfaa-3fa3-4802-80f2-6ac68defbc56 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.591502] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1472.591502] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5296805a-8f63-b2bc-1c71-0acc5d06fefa" [ 1472.591502] env[63241]: _type = "Task" [ 1472.591502] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.604314] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5296805a-8f63-b2bc-1c71-0acc5d06fefa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.670087] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820165, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155683} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.672846] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1472.674180] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68a7a01-86e6-4325-b1fe-b626aabcd48c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.698743] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] bef91c1c-a418-4464-ae7b-883ffb7e9695/bef91c1c-a418-4464-ae7b-883ffb7e9695.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1472.702200] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-079a1164-287d-4259-8451-d9c8fffb36bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.725385] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1472.725385] env[63241]: value = "task-1820166" [ 1472.725385] env[63241]: _type = "Task" [ 1472.725385] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.737369] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820166, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.887089] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0e0e84a-c502-4fdc-8fc1-d35752f702b2 req-5cab26f0-c2f7-467d-a625-47006607aebb service nova] Releasing lock "refresh_cache-efbe39fa-d581-41ac-b51c-9c94c9839d7a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.887364] env[63241]: DEBUG nova.compute.manager [req-b0e0e84a-c502-4fdc-8fc1-d35752f702b2 req-5cab26f0-c2f7-467d-a625-47006607aebb service nova] [instance: 343a7e90-5e55-4125-8475-44050f267987] Received event network-vif-deleted-c69d6232-5a3b-404e-b2ce-6724865adf54 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1472.982930] env[63241]: DEBUG nova.compute.utils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1472.984391] env[63241]: DEBUG nova.compute.manager [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1472.984765] env[63241]: DEBUG nova.network.neutron [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1473.033177] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e15969a-e44b-46f4-8783-68faed54aa36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.038519] env[63241]: DEBUG nova.policy [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '909c078d9d844872b692ef74ad801e46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81e16d69973c40f69085e32b5c24e1db', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1473.046009] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c5479a-091a-407f-89b8-ba70bb5c6587 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.083596] env[63241]: INFO nova.compute.manager [-] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Took 1.61 seconds to deallocate network for instance. 
[ 1473.084140] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Releasing lock "refresh_cache-72a11582-1fad-428a-bde1-e9d0b05731cd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.084465] env[63241]: DEBUG nova.compute.manager [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Instance network_info: |[{"id": "bd89b471-e155-45fc-9b21-40bc75f8f48a", "address": "fa:16:3e:6e:46:88", "network": {"id": "9e6ff769-6f81-4d69-9710-49158736659b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1337177874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d734ba31d184477f9d6cbb2bc3f63706", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd89b471-e1", "ovs_interfaceid": "bd89b471-e155-45fc-9b21-40bc75f8f48a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1473.091033] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:46:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '438671d0-9468-4e44-84c1-4c0ebaa743e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd89b471-e155-45fc-9b21-40bc75f8f48a', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1473.101657] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Creating folder: Project (d734ba31d184477f9d6cbb2bc3f63706). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1473.102542] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07aa705d-b6d7-478d-8b3f-c450c378e65b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.106289] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8856913f-95fa-4039-bcd6-3e43183cfe99 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.120136] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836c2b68-01bb-41ad-820f-b3424c45b8dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.124956] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5296805a-8f63-b2bc-1c71-0acc5d06fefa, 'name': SearchDatastore_Task, 'duration_secs': 0.023802} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.126474] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.126744] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] efbe39fa-d581-41ac-b51c-9c94c9839d7a/efbe39fa-d581-41ac-b51c-9c94c9839d7a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1473.127043] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Created folder: Project (d734ba31d184477f9d6cbb2bc3f63706) in parent group-v376927. [ 1473.127213] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Creating folder: Instances. Parent ref: group-v377040. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1473.127887] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46d54319-6bff-40d9-8687-8ea4faa3bbc7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.130175] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30e7d206-ffd8-4b99-ae31-3232480fdc4d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.140598] env[63241]: DEBUG nova.compute.provider_tree [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.150052] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1473.150052] env[63241]: value = "task-1820168" [ 1473.150052] env[63241]: _type = "Task" [ 1473.150052] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.153152] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Created folder: Instances in parent group-v377040. [ 1473.153402] env[63241]: DEBUG oslo.service.loopingcall [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1473.154011] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1473.154261] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8579a125-d927-42fb-b367-b681e21655fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.173339] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.179808] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1473.179808] env[63241]: value = "task-1820170" [ 1473.179808] env[63241]: _type = "Task" [ 1473.179808] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.189201] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820170, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.235647] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820166, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.355617] env[63241]: DEBUG nova.network.neutron [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Successfully created port: c65a81e1-9494-4ac5-b371-ba4abad9643b {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1473.458836] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquiring lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.459530] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.459942] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquiring lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.460801] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.460801] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.465333] env[63241]: INFO nova.compute.manager [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Terminating instance [ 1473.467846] env[63241]: DEBUG nova.compute.manager [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: 
a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1473.468099] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1473.468976] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a009589b-f83f-4a7e-a493-2592d37c3b5b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.479142] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1473.479562] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-143aeffe-e8d6-45c9-af1a-fa33a1eac00b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.488351] env[63241]: DEBUG nova.compute.manager [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1473.496105] env[63241]: DEBUG oslo_vmware.api [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1473.496105] env[63241]: value = "task-1820171" [ 1473.496105] env[63241]: _type = "Task" [ 1473.496105] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.507209] env[63241]: DEBUG oslo_vmware.api [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1820171, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.592422] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.648022] env[63241]: DEBUG nova.scheduler.client.report [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1473.660653] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820168, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.695358] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820170, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.738898] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820166, 'name': ReconfigVM_Task, 'duration_secs': 0.645549} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.739588] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Reconfigured VM instance instance-00000025 to attach disk [datastore1] bef91c1c-a418-4464-ae7b-883ffb7e9695/bef91c1c-a418-4464-ae7b-883ffb7e9695.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1473.742022] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9741e858-c464-4f63-ac81-ed6503aed500 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.754672] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1473.754672] env[63241]: value = "task-1820172" [ 1473.754672] env[63241]: _type = "Task" [ 1473.754672] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.767516] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820172, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.924483] env[63241]: DEBUG nova.compute.manager [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Received event network-vif-plugged-bd89b471-e155-45fc-9b21-40bc75f8f48a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1473.924483] env[63241]: DEBUG oslo_concurrency.lockutils [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] Acquiring lock "72a11582-1fad-428a-bde1-e9d0b05731cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.924483] env[63241]: DEBUG oslo_concurrency.lockutils [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] Lock "72a11582-1fad-428a-bde1-e9d0b05731cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.924483] env[63241]: DEBUG oslo_concurrency.lockutils [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] Lock "72a11582-1fad-428a-bde1-e9d0b05731cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.924483] env[63241]: DEBUG nova.compute.manager [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] No waiting events found dispatching network-vif-plugged-bd89b471-e155-45fc-9b21-40bc75f8f48a {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1473.924483] env[63241]: WARNING nova.compute.manager [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Received unexpected event network-vif-plugged-bd89b471-e155-45fc-9b21-40bc75f8f48a for instance with vm_state building and task_state spawning. [ 1473.924956] env[63241]: DEBUG nova.compute.manager [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Received event network-changed-bd89b471-e155-45fc-9b21-40bc75f8f48a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1473.925295] env[63241]: DEBUG nova.compute.manager [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Refreshing instance network info cache due to event network-changed-bd89b471-e155-45fc-9b21-40bc75f8f48a. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1473.925620] env[63241]: DEBUG oslo_concurrency.lockutils [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] Acquiring lock "refresh_cache-72a11582-1fad-428a-bde1-e9d0b05731cd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.925895] env[63241]: DEBUG oslo_concurrency.lockutils [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] Acquired lock "refresh_cache-72a11582-1fad-428a-bde1-e9d0b05731cd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.926203] env[63241]: DEBUG nova.network.neutron [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Refreshing network info cache for port bd89b471-e155-45fc-9b21-40bc75f8f48a {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1474.010240] env[63241]: DEBUG oslo_vmware.api [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1820171, 'name': PowerOffVM_Task, 'duration_secs': 0.474297} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.010524] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1474.010691] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1474.010956] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55a4c069-90b1-4a1c-b317-e026326e14a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.155578] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.156494] env[63241]: DEBUG nova.compute.manager [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1474.162021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.532s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.162021] env[63241]: DEBUG nova.objects.instance [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lazy-loading 'resources' on Instance uuid e2758650-2762-49f6-a678-f55425a89994 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1474.172884] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662939} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.174079] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] efbe39fa-d581-41ac-b51c-9c94c9839d7a/efbe39fa-d581-41ac-b51c-9c94c9839d7a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1474.174378] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1474.174693] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a95f0bfa-4e3e-4e03-a763-41394a17f9b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.193674] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1474.193674] env[63241]: value = "task-1820174" [ 1474.193674] env[63241]: _type = "Task" [ 1474.193674] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.204213] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820170, 'name': CreateVM_Task, 'duration_secs': 0.759311} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.204982] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1474.205797] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.206099] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.206341] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1474.206616] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b28c2a01-61bd-4f0a-b638-34f1e5c1cc46 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.214474] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820174, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.216144] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1474.216144] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5229c4a7-16e1-11e7-9cfc-b4dcb7c7b178" [ 1474.216144] env[63241]: _type = "Task" [ 1474.216144] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.228510] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5229c4a7-16e1-11e7-9cfc-b4dcb7c7b178, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.264291] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820172, 'name': Rename_Task, 'duration_secs': 0.262619} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.264689] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1474.264953] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2471105-fcd9-431d-8385-e575433978dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.274619] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1474.274619] env[63241]: value = "task-1820175" [ 1474.274619] env[63241]: _type = "Task" [ 1474.274619] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.286699] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820175, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.338226] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1474.338604] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1474.338808] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Deleting the datastore file [datastore1] a88ba00d-6644-4ecc-8603-a7d79ce8a4b4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1474.339399] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5da4312-b776-464e-ad07-54f9f98f6bf3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.353094] env[63241]: DEBUG oslo_vmware.api [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for the task: (returnval){ [ 1474.353094] env[63241]: value = "task-1820176" [ 1474.353094] env[63241]: _type = "Task" [ 1474.353094] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.364174] env[63241]: DEBUG oslo_vmware.api [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1820176, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.505331] env[63241]: DEBUG nova.compute.manager [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1474.546277] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1474.546701] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1474.546871] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1474.547073] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1474.547226] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1474.547398] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1474.547747] env[63241]: DEBUG nova.virt.hardware [None 
req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1474.548043] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1474.548249] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1474.548474] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1474.548669] env[63241]: DEBUG nova.virt.hardware [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1474.549641] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9871a4-5506-4c43-8f4b-cf18bad94f85 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.563376] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1393b512-2210-4be0-8dd5-0204955a7408 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.664624] env[63241]: DEBUG nova.compute.utils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1474.670359] env[63241]: DEBUG nova.compute.manager [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1474.670638] env[63241]: DEBUG nova.network.neutron [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1474.713020] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820174, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.168938} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.713329] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1474.714170] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794dfeb6-5895-407e-84ac-81a859ff3145 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.728370] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5229c4a7-16e1-11e7-9cfc-b4dcb7c7b178, 'name': SearchDatastore_Task, 'duration_secs': 0.020325} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.746993] env[63241]: DEBUG nova.policy [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78657a2bc34d4bb9922678ed287530f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18094134f49b4e84b83e97631bc22903', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1474.748215] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.748484] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1474.748840] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.749070] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.749355] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1474.761793] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] efbe39fa-d581-41ac-b51c-9c94c9839d7a/efbe39fa-d581-41ac-b51c-9c94c9839d7a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1474.761793] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63ac0c93-3823-4a4c-8de3-21f29933e478 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.762773] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c7cb7a4-c7db-455b-aa82-362782f2f8e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.794543] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1474.794543] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1474.799848] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8fc23be-b076-491d-a3b0-6ce9f6365731 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.803268] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820175, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.809800] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1474.809800] env[63241]: value = "task-1820177" [ 1474.809800] env[63241]: _type = "Task" [ 1474.809800] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.817166] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1474.817166] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524d3949-d14e-5c6e-a19e-84ac8cb2a389" [ 1474.817166] env[63241]: _type = "Task" [ 1474.817166] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.825528] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820177, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.831913] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524d3949-d14e-5c6e-a19e-84ac8cb2a389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.869996] env[63241]: DEBUG oslo_vmware.api [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Task: {'id': task-1820176, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374698} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.873832] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1474.877029] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1474.877029] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1474.877029] env[63241]: INFO nova.compute.manager [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Took 1.41 seconds to destroy the instance on the hypervisor. [ 1474.877029] env[63241]: DEBUG oslo.service.loopingcall [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1474.877029] env[63241]: DEBUG nova.compute.manager [-] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1474.877029] env[63241]: DEBUG nova.network.neutron [-] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1474.911596] env[63241]: DEBUG nova.network.neutron [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Updated VIF entry in instance network info cache for port bd89b471-e155-45fc-9b21-40bc75f8f48a. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1474.911961] env[63241]: DEBUG nova.network.neutron [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Updating instance_info_cache with network_info: [{"id": "bd89b471-e155-45fc-9b21-40bc75f8f48a", "address": "fa:16:3e:6e:46:88", "network": {"id": "9e6ff769-6f81-4d69-9710-49158736659b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1337177874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d734ba31d184477f9d6cbb2bc3f63706", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd89b471-e1", "ovs_interfaceid": "bd89b471-e155-45fc-9b21-40bc75f8f48a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.171017] env[63241]: DEBUG nova.compute.manager [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1475.290502] env[63241]: DEBUG nova.network.neutron [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Successfully created port: 032d2ad1-d0e9-4e9f-9ab4-654170139b7a {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1475.301977] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820175, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.327094] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820177, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.341374] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524d3949-d14e-5c6e-a19e-84ac8cb2a389, 'name': SearchDatastore_Task, 'duration_secs': 0.031844} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.342326] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0ca3338-10ed-4716-a659-3b1c9d752685 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.353913] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1475.353913] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52642b72-3f71-477c-5a16-c8d96237ffa3" [ 1475.353913] env[63241]: _type = "Task" [ 1475.353913] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.364745] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52642b72-3f71-477c-5a16-c8d96237ffa3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.417249] env[63241]: DEBUG oslo_concurrency.lockutils [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] Releasing lock "refresh_cache-72a11582-1fad-428a-bde1-e9d0b05731cd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.417635] env[63241]: DEBUG nova.compute.manager [req-2187cfc6-1b27-4c3f-b51b-78e7c7ffb4d0 req-65dc48ba-21ae-4f2f-b914-6c3a9395a2f0 service nova] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Received event network-vif-deleted-46d3ef2e-5410-4151-8ec8-30a6f2e5e221 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1475.467774] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02adff15-cc99-41f9-8d69-ba373306aeef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.477828] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dbfced-87b4-4555-b72e-29b890aedcad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.516126] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b59e2b1-4f1b-4793-a7cd-0078a8c92221 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.527252] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0455a4-670c-4278-be06-d15d0e2fb3f8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.546392] env[63241]: DEBUG nova.compute.provider_tree [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.568740] env[63241]: DEBUG nova.network.neutron [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Successfully updated port: c65a81e1-9494-4ac5-b371-ba4abad9643b {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1475.797803] env[63241]: DEBUG oslo_vmware.api [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820175, 'name': PowerOnVM_Task, 'duration_secs': 1.179478} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.798162] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1475.798406] env[63241]: INFO nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Took 10.71 seconds to spawn the instance on the hypervisor. [ 1475.798612] env[63241]: DEBUG nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1475.799482] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fbee0d-665f-4cbf-b955-c70c541cc6f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.823830] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820177, 'name': ReconfigVM_Task, 'duration_secs': 0.720876} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.825885] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Reconfigured VM instance instance-00000026 to attach disk [datastore1] efbe39fa-d581-41ac-b51c-9c94c9839d7a/efbe39fa-d581-41ac-b51c-9c94c9839d7a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1475.825885] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d35593c1-aed9-47ab-95e5-da2cc2cbc4fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.835613] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1475.835613] env[63241]: value = "task-1820178" [ 1475.835613] env[63241]: _type = "Task" [ 1475.835613] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.845271] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820178, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.875892] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52642b72-3f71-477c-5a16-c8d96237ffa3, 'name': SearchDatastore_Task, 'duration_secs': 0.017001} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.875892] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.875892] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 72a11582-1fad-428a-bde1-e9d0b05731cd/72a11582-1fad-428a-bde1-e9d0b05731cd.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1475.875892] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd9f371b-738d-4f06-96bc-2ae9125970e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.886025] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1475.886025] env[63241]: value = "task-1820179" [ 1475.886025] env[63241]: _type = "Task" [ 1475.886025] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.899428] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820179, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.050632] env[63241]: DEBUG nova.scheduler.client.report [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1476.069600] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquiring lock "refresh_cache-0b7c72e0-79b9-4435-9676-7a0e9afaf936" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.069847] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquired lock "refresh_cache-0b7c72e0-79b9-4435-9676-7a0e9afaf936" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.070078] env[63241]: DEBUG nova.network.neutron [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1476.184506] env[63241]: DEBUG nova.compute.manager [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1476.214213] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1476.214213] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1476.214213] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1476.214213] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1476.214213] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1476.214213] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1476.214966] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1476.215359] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1476.215700] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1476.216044] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1476.216498] env[63241]: DEBUG nova.virt.hardware [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1476.218366] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6146fac-bf0f-4292-8352-e1d672f7c16d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.232116] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ead014-fb7c-474e-ae36-be82d9196ea4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.320621] env[63241]: INFO nova.compute.manager [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Took 47.08 seconds to build instance. [ 1476.348026] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820178, 'name': Rename_Task, 'duration_secs': 0.315712} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.348026] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1476.348026] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3965b9f6-f7f6-4ee5-ade2-a987fda6e305 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.357040] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1476.357040] env[63241]: value = "task-1820180" [ 1476.357040] env[63241]: _type = "Task" [ 1476.357040] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.373453] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820180, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.397584] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820179, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.411981] env[63241]: DEBUG nova.network.neutron [-] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.559061] env[63241]: DEBUG oslo_concurrency.lockutils [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.398s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.560166] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.878s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.560813] env[63241]: DEBUG nova.objects.instance [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1476.589704] env[63241]: INFO nova.scheduler.client.report [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Deleted allocations for instance e2758650-2762-49f6-a678-f55425a89994 [ 1476.654668] env[63241]: DEBUG nova.network.neutron [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1476.825994] env[63241]: DEBUG oslo_concurrency.lockutils [None req-80c7be19-55eb-4f23-9518-a877e9cc9b3a tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "bef91c1c-a418-4464-ae7b-883ffb7e9695" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.180s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.873370] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820180, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.901020] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820179, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.705976} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.901020] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 72a11582-1fad-428a-bde1-e9d0b05731cd/72a11582-1fad-428a-bde1-e9d0b05731cd.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1476.901020] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1476.901020] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cadc18fd-a6ed-4522-ae59-2d0cedad1527 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.911320] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1476.911320] env[63241]: value = "task-1820181" [ 1476.911320] env[63241]: _type = "Task" [ 1476.911320] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.912425] env[63241]: INFO nova.compute.manager [-] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Took 2.04 seconds to deallocate network for instance. [ 1476.935341] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820181, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.954582] env[63241]: DEBUG nova.compute.manager [req-3c95978d-d267-4f20-888f-bfd00517e663 req-4b51dcde-9c62-4e6f-999a-1fd1c91b2d24 service nova] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Received event network-vif-deleted-279c7c67-cf23-442a-accf-544adeda8d12 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1476.985766] env[63241]: DEBUG nova.compute.manager [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Received event network-vif-plugged-c65a81e1-9494-4ac5-b371-ba4abad9643b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1476.986075] env[63241]: DEBUG oslo_concurrency.lockutils [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] Acquiring lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.986355] env[63241]: DEBUG oslo_concurrency.lockutils [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] Lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.986535] env[63241]: DEBUG oslo_concurrency.lockutils [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] Lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.986706] env[63241]: DEBUG nova.compute.manager [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] No waiting events found dispatching network-vif-plugged-c65a81e1-9494-4ac5-b371-ba4abad9643b {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1476.986874] env[63241]: WARNING nova.compute.manager [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Received unexpected event network-vif-plugged-c65a81e1-9494-4ac5-b371-ba4abad9643b for instance with vm_state building and task_state spawning. [ 1476.987053] env[63241]: DEBUG nova.compute.manager [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Received event network-changed-c65a81e1-9494-4ac5-b371-ba4abad9643b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1476.987217] env[63241]: DEBUG nova.compute.manager [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Refreshing instance network info cache due to event network-changed-c65a81e1-9494-4ac5-b371-ba4abad9643b. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1476.987381] env[63241]: DEBUG oslo_concurrency.lockutils [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] Acquiring lock "refresh_cache-0b7c72e0-79b9-4435-9676-7a0e9afaf936" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.989929] env[63241]: DEBUG nova.network.neutron [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Updating instance_info_cache with network_info: [{"id": "c65a81e1-9494-4ac5-b371-ba4abad9643b", "address": "fa:16:3e:60:e0:b4", "network": {"id": "31376013-55db-408f-96b1-73d026cbe5ef", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-482030346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81e16d69973c40f69085e32b5c24e1db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc65a81e1-94", "ovs_interfaceid": "c65a81e1-9494-4ac5-b371-ba4abad9643b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.098131] env[63241]: DEBUG oslo_concurrency.lockutils [None req-375e597e-c434-4c3c-84e0-c9a411424a21 tempest-ImagesNegativeTestJSON-207112561 tempest-ImagesNegativeTestJSON-207112561-project-member] Lock "e2758650-2762-49f6-a678-f55425a89994" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.737s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.128323] env[63241]: DEBUG nova.network.neutron [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Successfully updated port: 032d2ad1-d0e9-4e9f-9ab4-654170139b7a {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1477.329913] env[63241]: DEBUG nova.compute.manager [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1477.369532] env[63241]: DEBUG oslo_vmware.api [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820180, 'name': PowerOnVM_Task, 'duration_secs': 0.621721} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.369812] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1477.370021] env[63241]: INFO nova.compute.manager [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Took 9.51 seconds to spawn the instance on the hypervisor. [ 1477.370203] env[63241]: DEBUG nova.compute.manager [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1477.371032] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87bbf64-312a-4459-82eb-257ffc9be0c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.423364] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090243} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.423668] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1477.424520] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcd9e46-9b7d-42dd-8c94-b51b4646f514 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.428458] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.459515] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 72a11582-1fad-428a-bde1-e9d0b05731cd/72a11582-1fad-428a-bde1-e9d0b05731cd.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1477.459972] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63cd2a4d-077b-4116-98c7-bb4cc528802c {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.486553] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1477.486553] env[63241]: value = "task-1820182" [ 1477.486553] env[63241]: _type = "Task" [ 1477.486553] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.493498] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Releasing lock "refresh_cache-0b7c72e0-79b9-4435-9676-7a0e9afaf936" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.493858] env[63241]: DEBUG nova.compute.manager [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Instance network_info: |[{"id": "c65a81e1-9494-4ac5-b371-ba4abad9643b", "address": "fa:16:3e:60:e0:b4", "network": {"id": "31376013-55db-408f-96b1-73d026cbe5ef", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-482030346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81e16d69973c40f69085e32b5c24e1db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc65a81e1-94", "ovs_interfaceid": "c65a81e1-9494-4ac5-b371-ba4abad9643b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1477.494165] env[63241]: DEBUG oslo_concurrency.lockutils [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] Acquired lock "refresh_cache-0b7c72e0-79b9-4435-9676-7a0e9afaf936" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.494346] env[63241]: DEBUG nova.network.neutron [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Refreshing network info cache for port c65a81e1-9494-4ac5-b371-ba4abad9643b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1477.495621] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:e0:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'028bae2d-fe6c-4207-b4a3-3fab45fbf1d6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c65a81e1-9494-4ac5-b371-ba4abad9643b', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1477.504324] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Creating folder: Project (81e16d69973c40f69085e32b5c24e1db). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1477.509517] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07db81ff-67b6-42b1-81b3-d258db8a5841 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.511698] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820182, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.523325] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Created folder: Project (81e16d69973c40f69085e32b5c24e1db) in parent group-v376927. [ 1477.523543] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Creating folder: Instances. Parent ref: group-v377043. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1477.523796] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f0cc865-9112-4ed7-b606-04225714af9f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.535382] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Created folder: Instances in parent group-v377043. [ 1477.535643] env[63241]: DEBUG oslo.service.loopingcall [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1477.535843] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1477.536096] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8950e36-9b3c-4711-84ab-42f532cbdddc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.561280] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1477.561280] env[63241]: value = "task-1820185" [ 1477.561280] env[63241]: _type = "Task" [ 1477.561280] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.571710] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820185, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.575428] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5122076a-7302-47ab-b785-2e6ab4b42084 tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.576698] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.914s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.580107] env[63241]: INFO nova.compute.claims [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1477.631467] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "refresh_cache-5203c12e-14a0-4736-8185-8ead9a29b03b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.631697] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "refresh_cache-5203c12e-14a0-4736-8185-8ead9a29b03b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.631759] env[63241]: DEBUG nova.network.neutron [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.862647] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.896525] env[63241]: INFO nova.compute.manager [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Took 44.94 seconds to build instance. 
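The records above repeat two library patterns: oslo.vmware driving a vCenter task to completion (an "Invoking ..._Task" call, then wait_for_task/_poll_task entries reporting "progress is N%" and finally "completed successfully" with a duration_secs), and oslo.concurrency serializing work on named locks ("Acquiring lock ... acquired ... released ... held N.NNNs"). Below is a minimal sketch of how a caller exercises both APIs; the vCenter host, credentials, vm_ref and instance_uuid are placeholders, and this illustrates the library calls rather than Nova's own vmops code.

# Sketch only: host, credentials, vm_ref and instance_uuid are placeholders.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc1.example.test',             # vCenter host (placeholder)
    'administrator@vsphere.local',  # username (placeholder)
    'secret',                       # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5)         # how often _poll_task re-checks progress

def power_on(session, vm_ref, instance_uuid):
    # Serialize per-instance work on a named lock, mirroring the
    # "Acquiring lock ... / released ... held N.NNNs" entries above.
    with lockutils.lock(instance_uuid):
        # Invoking VirtualMachine.PowerOnVM_Task returns a task reference ...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ... which wait_for_task polls until the task reports success,
        # raising if the task errors out or is cancelled.
        return session.wait_for_task(task)

wait_for_task re-reads the task's state roughly every task_poll_interval seconds, which is what produces the repeated "progress is N%" lines between a task's creation and the "completed successfully" record.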
[ 1477.999671] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820182, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.070995] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522507da-746e-94dc-155d-25b8344394e5/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1478.072021] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c576ad-ecf5-43ca-86d7-349a22594563 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.082692] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522507da-746e-94dc-155d-25b8344394e5/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1478.082878] env[63241]: ERROR oslo_vmware.rw_handles [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522507da-746e-94dc-155d-25b8344394e5/disk-0.vmdk due to incomplete transfer. [ 1478.088051] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bf0324f4-8c7d-49f8-99aa-8761c88acf8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.092926] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820185, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.101417] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522507da-746e-94dc-155d-25b8344394e5/disk-0.vmdk. 
{{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1478.101623] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Uploaded image f39873a9-6d8a-4dbc-a330-041096a7e4cf to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1478.103987] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1478.104908] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c559d87c-8b09-4c56-80eb-362be9db53a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.113596] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1478.113596] env[63241]: value = "task-1820186" [ 1478.113596] env[63241]: _type = "Task" [ 1478.113596] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.127164] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820186, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.212540] env[63241]: DEBUG nova.network.neutron [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1478.280546] env[63241]: DEBUG nova.network.neutron [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Updated VIF entry in instance network info cache for port c65a81e1-9494-4ac5-b371-ba4abad9643b. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1478.280546] env[63241]: DEBUG nova.network.neutron [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Updating instance_info_cache with network_info: [{"id": "c65a81e1-9494-4ac5-b371-ba4abad9643b", "address": "fa:16:3e:60:e0:b4", "network": {"id": "31376013-55db-408f-96b1-73d026cbe5ef", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-482030346-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81e16d69973c40f69085e32b5c24e1db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc65a81e1-94", "ovs_interfaceid": "c65a81e1-9494-4ac5-b371-ba4abad9643b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.398777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-784d64b4-d1dd-4f22-9396-a1eeef4675f6 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.138s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.498546] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820182, 'name': ReconfigVM_Task, 'duration_secs': 0.910562} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.498841] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 72a11582-1fad-428a-bde1-e9d0b05731cd/72a11582-1fad-428a-bde1-e9d0b05731cd.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1478.499568] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29784a64-3176-468e-a562-c8b2928a4751 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.512300] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1478.512300] env[63241]: value = "task-1820187" [ 1478.512300] env[63241]: _type = "Task" [ 1478.512300] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.521705] env[63241]: DEBUG nova.network.neutron [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Updating instance_info_cache with network_info: [{"id": "032d2ad1-d0e9-4e9f-9ab4-654170139b7a", "address": "fa:16:3e:de:63:e8", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap032d2ad1-d0", "ovs_interfaceid": "032d2ad1-d0e9-4e9f-9ab4-654170139b7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.532366] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820187, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.574809] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820185, 'name': CreateVM_Task, 'duration_secs': 0.907792} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.575161] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1478.575747] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.575941] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.576606] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1478.576606] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99972a70-754a-459c-adc1-dbc4b42403a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.582846] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1478.582846] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528384fa-f95a-bb4d-39e7-c12294bd6735" [ 1478.582846] env[63241]: _type = "Task" [ 1478.582846] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.593015] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528384fa-f95a-bb4d-39e7-c12294bd6735, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.627615] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820186, 'name': Destroy_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.784370] env[63241]: DEBUG oslo_concurrency.lockutils [req-d333dcf9-8127-43c0-b17a-4610858bd54e req-18b33aed-1ba3-4359-ac4e-82aa06c9547c service nova] Releasing lock "refresh_cache-0b7c72e0-79b9-4435-9676-7a0e9afaf936" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.902645] env[63241]: DEBUG nova.compute.manager [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1479.025155] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820187, 'name': Rename_Task, 'duration_secs': 0.254342} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.029701] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1479.030768] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94c72e87-bd10-4114-af1b-a21392a89311 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.034017] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "refresh_cache-5203c12e-14a0-4736-8185-8ead9a29b03b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.034017] env[63241]: DEBUG nova.compute.manager [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Instance network_info: |[{"id": "032d2ad1-d0e9-4e9f-9ab4-654170139b7a", "address": "fa:16:3e:de:63:e8", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap032d2ad1-d0", "ovs_interfaceid": "032d2ad1-d0e9-4e9f-9ab4-654170139b7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1479.034634] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:63:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '032d2ad1-d0e9-4e9f-9ab4-654170139b7a', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1479.044616] env[63241]: DEBUG oslo.service.loopingcall [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.049100] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1479.049427] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1479.049427] env[63241]: value = "task-1820188" [ 1479.049427] env[63241]: _type = "Task" [ 1479.049427] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.050615] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92b1d864-2706-498e-95ef-9e8e42710a2e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.089227] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820188, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.094130] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1479.094130] env[63241]: value = "task-1820189" [ 1479.094130] env[63241]: _type = "Task" [ 1479.094130] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.104475] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528384fa-f95a-bb4d-39e7-c12294bd6735, 'name': SearchDatastore_Task, 'duration_secs': 0.013793} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.105840] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.106193] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1479.108083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.108083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.108083] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.108083] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8fea897-b897-4e1a-8428-08deb07ac0b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.114745] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820189, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.114745] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.114745] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.114745] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.114745] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.114745] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.120353] env[63241]: INFO nova.compute.manager [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Terminating instance [ 1479.124387] env[63241]: DEBUG nova.compute.manager [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1479.124646] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1479.126700] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2895d2-7a18-4bd9-90f4-a4d9509f6b84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.129521] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.129598] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1479.133700] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79b8e7ac-e7c6-45c1-8a6c-8d2a0429c33e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.140610] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820186, 'name': Destroy_Task, 'duration_secs': 0.642117} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.141254] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Destroyed the VM [ 1479.141520] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1479.144275] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0e251798-5640-4c47-855f-c4605032d541 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.148184] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1479.148656] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1479.148656] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526cf706-dd08-c160-6234-37dcb69a580c" [ 1479.148656] env[63241]: _type = "Task" [ 1479.148656] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.153276] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d034325-efb8-4031-aa7b-80b6565e0898 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.164819] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1479.164819] env[63241]: value = "task-1820190" [ 1479.164819] env[63241]: _type = "Task" [ 1479.164819] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.173395] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526cf706-dd08-c160-6234-37dcb69a580c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.175438] env[63241]: DEBUG oslo_vmware.api [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1479.175438] env[63241]: value = "task-1820191" [ 1479.175438] env[63241]: _type = "Task" [ 1479.175438] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.182777] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820190, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.191587] env[63241]: DEBUG oslo_vmware.api [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820191, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.198884] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fec635-ebc0-470f-a707-79a1547f74c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.209577] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adeb63de-bbe0-4595-9f7b-83cbea3427a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.240948] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f5cc08-352f-4a62-9626-f5cfcaeeaa4f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.250057] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc8929d-a752-4396-ba3d-6ab1c1cf7d9f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.265662] env[63241]: DEBUG nova.compute.provider_tree [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1479.414999] env[63241]: DEBUG nova.compute.manager [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Received event network-vif-plugged-032d2ad1-d0e9-4e9f-9ab4-654170139b7a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1479.415328] env[63241]: DEBUG oslo_concurrency.lockutils [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] Acquiring lock "5203c12e-14a0-4736-8185-8ead9a29b03b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.415953] env[63241]: DEBUG oslo_concurrency.lockutils [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 
req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] Lock "5203c12e-14a0-4736-8185-8ead9a29b03b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.415953] env[63241]: DEBUG oslo_concurrency.lockutils [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] Lock "5203c12e-14a0-4736-8185-8ead9a29b03b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.416322] env[63241]: DEBUG nova.compute.manager [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] No waiting events found dispatching network-vif-plugged-032d2ad1-d0e9-4e9f-9ab4-654170139b7a {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1479.416526] env[63241]: WARNING nova.compute.manager [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Received unexpected event network-vif-plugged-032d2ad1-d0e9-4e9f-9ab4-654170139b7a for instance with vm_state building and task_state spawning. [ 1479.417927] env[63241]: DEBUG nova.compute.manager [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Received event network-changed-032d2ad1-d0e9-4e9f-9ab4-654170139b7a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1479.417927] env[63241]: DEBUG nova.compute.manager [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Refreshing instance network info cache due to event network-changed-032d2ad1-d0e9-4e9f-9ab4-654170139b7a. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1479.417927] env[63241]: DEBUG oslo_concurrency.lockutils [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] Acquiring lock "refresh_cache-5203c12e-14a0-4736-8185-8ead9a29b03b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.417927] env[63241]: DEBUG oslo_concurrency.lockutils [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] Acquired lock "refresh_cache-5203c12e-14a0-4736-8185-8ead9a29b03b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.417927] env[63241]: DEBUG nova.network.neutron [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Refreshing network info cache for port 032d2ad1-d0e9-4e9f-9ab4-654170139b7a {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1479.436343] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.591657] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820188, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.609976] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820189, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.670480] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526cf706-dd08-c160-6234-37dcb69a580c, 'name': SearchDatastore_Task, 'duration_secs': 0.03023} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.675275] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eb0ced5-684f-46e7-a660-25ff251881e7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.686473] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820190, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.688644] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1479.688644] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5235b394-76b0-d81e-f8d4-28db6e77ea4b" [ 1479.688644] env[63241]: _type = "Task" [ 1479.688644] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.696662] env[63241]: DEBUG oslo_vmware.api [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820191, 'name': PowerOffVM_Task, 'duration_secs': 0.257886} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.698164] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1479.698716] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1479.699121] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06d56c83-df0a-4dcd-80ea-b5bceda185fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.709381] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5235b394-76b0-d81e-f8d4-28db6e77ea4b, 'name': SearchDatastore_Task, 'duration_secs': 0.01388} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.709381] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.709789] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0b7c72e0-79b9-4435-9676-7a0e9afaf936/0b7c72e0-79b9-4435-9676-7a0e9afaf936.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1479.712201] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47ac9640-2904-4a31-a930-3cdd4bfd333a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.721476] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1479.721476] env[63241]: value = "task-1820193" [ 1479.721476] env[63241]: _type = "Task" [ 1479.721476] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.731587] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820193, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.795779] env[63241]: ERROR nova.scheduler.client.report [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [req-96375d3e-c03e-40bd-8a90-1322961b59cf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-96375d3e-c03e-40bd-8a90-1322961b59cf"}]} [ 1479.832556] env[63241]: DEBUG nova.scheduler.client.report [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1479.856476] env[63241]: DEBUG nova.scheduler.client.report [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1479.856984] env[63241]: DEBUG nova.compute.provider_tree [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1479.861747] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eef08e2-71a1-4b64-ab42-2ca029ce039d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.870095] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b89930-259c-4738-9ad0-9aa6de3d0351 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Suspending the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1479.870815] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9d3649c8-e78b-44b8-977f-39b2ff7487bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.874073] env[63241]: DEBUG nova.scheduler.client.report [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1479.878659] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquiring lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.878659] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.878735] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquiring lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.878985] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.879217] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.882874] env[63241]: DEBUG oslo_vmware.api [None req-a4b89930-259c-4738-9ad0-9aa6de3d0351 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1479.882874] env[63241]: value = "task-1820194" [ 1479.882874] env[63241]: _type = "Task" [ 1479.882874] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.883055] env[63241]: INFO nova.compute.manager [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Terminating instance [ 1479.891841] env[63241]: DEBUG nova.compute.manager [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1479.891841] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1479.891841] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89579e5d-c504-4a6b-b82d-40613f925fbc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.901886] env[63241]: DEBUG nova.scheduler.client.report [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1479.911605] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1479.911605] env[63241]: DEBUG oslo_vmware.api [None req-a4b89930-259c-4738-9ad0-9aa6de3d0351 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820194, 'name': SuspendVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.911605] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1717c27-8a6b-4085-a085-57d5178a1bf4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.921572] env[63241]: DEBUG oslo_vmware.api [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1479.921572] env[63241]: value = "task-1820195" [ 1479.921572] env[63241]: _type = "Task" [ 1479.921572] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.935896] env[63241]: DEBUG oslo_vmware.api [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1820195, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.092847] env[63241]: DEBUG oslo_vmware.api [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820188, 'name': PowerOnVM_Task, 'duration_secs': 0.634618} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.097924] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1480.098514] env[63241]: INFO nova.compute.manager [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Took 9.35 seconds to spawn the instance on the hypervisor. [ 1480.099245] env[63241]: DEBUG nova.compute.manager [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1480.100444] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fea67c5-27f8-460a-bb66-810b07818cf9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.133966] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820189, 'name': CreateVM_Task, 'duration_secs': 0.595137} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.137503] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1480.139130] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.139130] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.139496] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1480.139682] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4edb675-4c74-453f-8432-b0424d69d12b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.147900] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: 
(returnval){ [ 1480.147900] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c16434-c204-81cf-7e4d-fc86514b1c5c" [ 1480.147900] env[63241]: _type = "Task" [ 1480.147900] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.162484] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c16434-c204-81cf-7e4d-fc86514b1c5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.183698] env[63241]: DEBUG oslo_vmware.api [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820190, 'name': RemoveSnapshot_Task, 'duration_secs': 0.95947} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.183698] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1480.183698] env[63241]: INFO nova.compute.manager [None req-1553adef-8fc3-4c53-9f8c-356b1bff526a tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Took 17.92 seconds to snapshot the instance on the hypervisor. 
[ 1480.204900] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1480.205355] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1480.205355] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Deleting the datastore file [datastore1] c1c85cc0-53f1-4920-8f3e-6dd69414fa85 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1480.205614] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09d5ccbb-4a24-4bac-8d68-cf1d1e087181 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.219020] env[63241]: DEBUG oslo_vmware.api [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1480.219020] env[63241]: value = "task-1820196" [ 1480.219020] env[63241]: _type = "Task" [ 1480.219020] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.240724] env[63241]: DEBUG oslo_vmware.api [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820196, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.244591] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820193, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.406098] env[63241]: DEBUG oslo_vmware.api [None req-a4b89930-259c-4738-9ad0-9aa6de3d0351 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820194, 'name': SuspendVM_Task} progress is 87%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.433288] env[63241]: DEBUG oslo_vmware.api [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1820195, 'name': PowerOffVM_Task, 'duration_secs': 0.473391} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.436153] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1480.436346] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1480.437696] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4d199b7-45b7-4854-a718-f73af5c7797a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.574769] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f05518-0372-4854-8a09-a6b10f086d4c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.584502] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa86467-15cd-40dc-b4c9-551515dda205 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.616976] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d09312-bf58-4fd6-a246-711262c4e69d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.626208] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc6482b-5151-46fa-a8d5-96039da41906 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.646817] env[63241]: DEBUG nova.compute.provider_tree [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1480.652753] env[63241]: INFO nova.compute.manager [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Took 37.90 seconds to build instance. 
[ 1480.660494] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c16434-c204-81cf-7e4d-fc86514b1c5c, 'name': SearchDatastore_Task, 'duration_secs': 0.065588} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.661630] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.661718] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1480.663128] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.663128] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.663128] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1480.663128] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd95e3a4-8543-45e8-8326-6f88ef159720 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.675800] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1480.676411] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1480.676926] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5b36f99-8214-4257-a88d-45aa27165d0c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.684306] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1480.684306] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c2311c-0753-6d69-be73-f02c344204f0" [ 1480.684306] env[63241]: _type = "Task" [ 1480.684306] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.697193] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c2311c-0753-6d69-be73-f02c344204f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.732064] env[63241]: DEBUG nova.network.neutron [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Updated VIF entry in instance network info cache for port 032d2ad1-d0e9-4e9f-9ab4-654170139b7a. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1480.732675] env[63241]: DEBUG nova.network.neutron [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Updating instance_info_cache with network_info: [{"id": "032d2ad1-d0e9-4e9f-9ab4-654170139b7a", "address": "fa:16:3e:de:63:e8", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap032d2ad1-d0", "ovs_interfaceid": "032d2ad1-d0e9-4e9f-9ab4-654170139b7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.745267] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820193, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.738729} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.745588] env[63241]: DEBUG oslo_vmware.api [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820196, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.747124] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0b7c72e0-79b9-4435-9676-7a0e9afaf936/0b7c72e0-79b9-4435-9676-7a0e9afaf936.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1480.747409] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1480.747732] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1480.747908] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1480.748156] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Deleting the datastore file [datastore1] 3c51d4dc-5a2c-4483-9aa5-8bab532971d4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1480.748964] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e883995-9069-4442-8dbb-606eb13d892d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.750765] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8a5c399-6b32-4c64-8c5c-c606be11e4b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.761340] env[63241]: DEBUG oslo_vmware.api [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for the task: (returnval){ [ 1480.761340] env[63241]: value = "task-1820199" [ 1480.761340] env[63241]: _type = "Task" [ 1480.761340] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.762481] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1480.762481] env[63241]: value = "task-1820198" [ 1480.762481] env[63241]: _type = "Task" [ 1480.762481] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.776531] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820198, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.780385] env[63241]: DEBUG oslo_vmware.api [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1820199, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.901081] env[63241]: DEBUG oslo_vmware.api [None req-a4b89930-259c-4738-9ad0-9aa6de3d0351 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820194, 'name': SuspendVM_Task, 'duration_secs': 0.858854} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.901948] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b89930-259c-4738-9ad0-9aa6de3d0351 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Suspended the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1480.901948] env[63241]: DEBUG nova.compute.manager [None req-a4b89930-259c-4738-9ad0-9aa6de3d0351 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1480.902864] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc402cd-ec6c-40fe-a9c6-e978e1c3818a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.157427] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de3f86a9-eb17-492e-8c5c-663dc41fb389 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "72a11582-1fad-428a-bde1-e9d0b05731cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.225s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.184431] env[63241]: DEBUG nova.scheduler.client.report [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 60 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1481.184743] env[63241]: DEBUG nova.compute.provider_tree [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 60 to 61 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1481.184935] env[63241]: DEBUG nova.compute.provider_tree [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1481.201038] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c2311c-0753-6d69-be73-f02c344204f0, 'name': SearchDatastore_Task, 'duration_secs': 0.035882} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.201869] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a78d0cc7-4c83-4ab0-b54b-4b02148fe737 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.210015] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1481.210015] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520b4da4-a0a0-764c-ec8f-e76d1c687fc6" [ 1481.210015] env[63241]: _type = "Task" [ 1481.210015] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.218884] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520b4da4-a0a0-764c-ec8f-e76d1c687fc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.235214] env[63241]: DEBUG oslo_vmware.api [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.570579} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.235214] env[63241]: DEBUG oslo_concurrency.lockutils [req-63b2ccfc-1bb6-42ea-b442-159d1e3fbd49 req-3cb86c3b-dbf6-434c-a37f-2909779f4fe0 service nova] Releasing lock "refresh_cache-5203c12e-14a0-4736-8185-8ead9a29b03b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.235597] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1481.235597] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1481.235790] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1481.235909] env[63241]: INFO nova.compute.manager [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Took 2.11 seconds to destroy the instance on the hypervisor. [ 1481.236232] env[63241]: DEBUG oslo.service.loopingcall [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1481.236438] env[63241]: DEBUG nova.compute.manager [-] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1481.236568] env[63241]: DEBUG nova.network.neutron [-] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1481.275753] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820198, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082122} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.278896] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1481.279223] env[63241]: DEBUG oslo_vmware.api [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1820199, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.279942] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6c0e70-fa02-4538-ab68-fcd1d654ca44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.303922] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 0b7c72e0-79b9-4435-9676-7a0e9afaf936/0b7c72e0-79b9-4435-9676-7a0e9afaf936.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1481.304290] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ff827f7-9d37-44ea-a20c-0fd646548c0a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.328019] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1481.328019] env[63241]: value = "task-1820200" [ 1481.328019] env[63241]: _type = "Task" [ 1481.328019] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.337489] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820200, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.661551] env[63241]: DEBUG nova.compute.manager [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1481.695744] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.117s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.695744] env[63241]: DEBUG nova.compute.manager [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1481.698466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.938s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.698686] env[63241]: DEBUG nova.objects.instance [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lazy-loading 'resources' on Instance uuid c390d1ca-a199-4df6-847a-b543630a7bf5 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1481.723850] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520b4da4-a0a0-764c-ec8f-e76d1c687fc6, 'name': SearchDatastore_Task, 'duration_secs': 0.028839} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.724169] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.724705] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 5203c12e-14a0-4736-8185-8ead9a29b03b/5203c12e-14a0-4736-8185-8ead9a29b03b.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1481.724818] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b1eccd6-0680-4eb0-9ab9-ca1521a318ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.734476] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1481.734476] env[63241]: value = "task-1820201" [ 1481.734476] env[63241]: _type = "Task" [ 1481.734476] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.747521] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820201, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.774102] env[63241]: DEBUG oslo_vmware.api [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Task: {'id': task-1820199, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.540809} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.774369] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1481.774553] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1481.774731] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1481.774899] env[63241]: INFO nova.compute.manager [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Took 1.88 seconds to destroy the instance on the hypervisor. [ 1481.775159] env[63241]: DEBUG oslo.service.loopingcall [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1481.775358] env[63241]: DEBUG nova.compute.manager [-] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1481.775547] env[63241]: DEBUG nova.network.neutron [-] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1481.845867] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820200, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.195578] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.201686] env[63241]: DEBUG nova.compute.utils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1482.203292] env[63241]: DEBUG nova.compute.manager [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1482.203521] env[63241]: DEBUG nova.network.neutron [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1482.252464] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820201, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.307208] env[63241]: DEBUG nova.policy [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa5224c96c3545269f4f45be620a7cdf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98582d7ee18145318ee5a05cac36781e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1482.319083] env[63241]: DEBUG nova.network.neutron [-] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.345836] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820200, 'name': ReconfigVM_Task, 'duration_secs': 0.678383} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.346510] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 0b7c72e0-79b9-4435-9676-7a0e9afaf936/0b7c72e0-79b9-4435-9676-7a0e9afaf936.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1482.347294] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b9e1807-775c-4505-90de-6e74eb488790 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.357765] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1482.357765] env[63241]: value = "task-1820202" [ 1482.357765] env[63241]: _type = "Task" [ 1482.357765] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.369538] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820202, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.511903] env[63241]: DEBUG nova.compute.manager [req-2d9d8a57-7e4c-43cc-94af-3ae835634fa1 req-8db17ee6-9de4-4351-afc8-40a0043048f3 service nova] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Received event network-vif-deleted-70f92de2-a999-4655-b551-a67d4f67f289 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1482.635809] env[63241]: DEBUG oslo_concurrency.lockutils [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-27177719-5090-43de-9bca-6db6bebab7b4-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.636090] env[63241]: DEBUG oslo_concurrency.lockutils [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-27177719-5090-43de-9bca-6db6bebab7b4-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.636716] env[63241]: DEBUG nova.objects.instance [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'flavor' on Instance uuid 27177719-5090-43de-9bca-6db6bebab7b4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1482.660892] env[63241]: DEBUG nova.network.neutron [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 
tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Successfully created port: 1e273a71-b8e7-4187-a1cd-c61f52ba3e85 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1482.716858] env[63241]: DEBUG nova.compute.manager [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1482.753826] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820201, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.811571} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.756694] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 5203c12e-14a0-4736-8185-8ead9a29b03b/5203c12e-14a0-4736-8185-8ead9a29b03b.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1482.756694] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1482.756971] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-458c4fb2-af52-47f2-917d-2bc0cd7f4497 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.769941] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1482.769941] env[63241]: value = "task-1820203" [ 1482.769941] env[63241]: _type = "Task" [ 1482.769941] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.786167] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820203, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.823161] env[63241]: INFO nova.compute.manager [-] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Took 1.59 seconds to deallocate network for instance. 
[ 1482.850554] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0a5653-0627-48de-b900-22348b564ec1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.867618] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3459337-3cb4-4673-a7cc-69b02a31079a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.878801] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820202, 'name': Rename_Task, 'duration_secs': 0.212102} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.909922] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1482.912457] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1edb564-ccbf-4bd1-bf6d-8f51bafa2206 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.918629] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce11870d-9cd4-4f6e-ac15-f061fbafd146 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.932846] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd938a4b-5ecb-4ef7-a039-64482e1fe436 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.936963] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1482.936963] env[63241]: value = "task-1820204" [ 1482.936963] env[63241]: _type = "Task" [ 1482.936963] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.951637] env[63241]: DEBUG nova.compute.provider_tree [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.958847] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820204, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.161541] env[63241]: DEBUG nova.compute.manager [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1483.162864] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e9d491-fed6-48e1-9276-490cd5b1bc9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.284747] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155785} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.285076] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1483.286120] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa11dbe-744d-4c35-a5d2-01dd7b89c116 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.316892] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 5203c12e-14a0-4736-8185-8ead9a29b03b/5203c12e-14a0-4736-8185-8ead9a29b03b.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1483.317291] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5d67eaf-1f30-4997-92a7-bb07692874d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.334992] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.336759] env[63241]: DEBUG nova.objects.instance [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'pci_requests' on Instance uuid 27177719-5090-43de-9bca-6db6bebab7b4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1483.342654] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1483.342654] env[63241]: value = "task-1820205" [ 
1483.342654] env[63241]: _type = "Task" [ 1483.342654] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.353588] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820205, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.422225] env[63241]: DEBUG nova.network.neutron [-] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.456158] env[63241]: DEBUG nova.scheduler.client.report [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1483.460284] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820204, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.677967] env[63241]: INFO nova.compute.manager [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] instance snapshotting [ 1483.678281] env[63241]: WARNING nova.compute.manager [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1483.681579] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2710e12-d823-4631-a1ac-9e72f377ac44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.709946] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b8c05c-c153-4227-89e9-6816c1c28510 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.727282] env[63241]: DEBUG nova.compute.manager [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1483.770638] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1483.770895] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1483.771063] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1483.771333] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1483.771487] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1483.771634] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1483.771917] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1483.772008] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1483.772201] 
env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1483.772370] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1483.772542] env[63241]: DEBUG nova.virt.hardware [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1483.773492] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07267d5a-6fc4-4971-9c3c-dcd2b6ac9b19 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.786136] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577ec779-4239-4ac4-907e-dd6bcbef9b93 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.840229] env[63241]: DEBUG nova.objects.base [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Object Instance<27177719-5090-43de-9bca-6db6bebab7b4> lazy-loaded attributes: flavor,pci_requests {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1483.840745] env[63241]: DEBUG nova.network.neutron [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1483.853503] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820205, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.897698] env[63241]: DEBUG nova.policy [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54dc853b6f204a75ae7612f9fbd2d1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecafb0abbdc74501b22b20b797c4c60c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1483.930520] env[63241]: INFO nova.compute.manager [-] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Took 2.15 seconds to deallocate network for instance. 
[ 1483.951325] env[63241]: DEBUG oslo_vmware.api [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820204, 'name': PowerOnVM_Task, 'duration_secs': 0.526207} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.951325] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1483.951325] env[63241]: INFO nova.compute.manager [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Took 9.45 seconds to spawn the instance on the hypervisor. [ 1483.951738] env[63241]: DEBUG nova.compute.manager [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1483.954018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae26151-8c7e-4e15-a9d6-0c0bb3a0e0e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.962855] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.263s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.971852] env[63241]: DEBUG oslo_concurrency.lockutils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.047s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.973676] env[63241]: INFO nova.compute.claims [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1484.015717] env[63241]: INFO nova.scheduler.client.report [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Deleted allocations for instance c390d1ca-a199-4df6-847a-b543630a7bf5 [ 1484.154995] env[63241]: DEBUG nova.compute.manager [req-dd2f47ea-61ff-4aaa-a5da-81ffcaa1583e req-bb433846-49ec-4be1-b9c3-d12ceca062ad service nova] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Received event network-vif-deleted-1e0eeec7-9caf-4069-8cad-d1d0d038ea2b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1484.225012] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None 
req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1484.225012] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-871f365f-c6c7-4248-b3e7-9c011ae64ab4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.237653] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1484.237653] env[63241]: value = "task-1820206" [ 1484.237653] env[63241]: _type = "Task" [ 1484.237653] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.249084] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820206, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.300676] env[63241]: DEBUG nova.network.neutron [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Successfully created port: 6c744114-882e-410b-a8fd-10bac7d2be70 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1484.354862] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820205, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.438761] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.487198] env[63241]: INFO nova.compute.manager [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Took 38.44 seconds to build instance. [ 1484.531601] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c0ca98d2-e961-4410-a145-a28d1743eecc tempest-ServerShowV254Test-871891608 tempest-ServerShowV254Test-871891608-project-member] Lock "c390d1ca-a199-4df6-847a-b543630a7bf5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.511s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.751702] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820206, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.857848] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820205, 'name': ReconfigVM_Task, 'duration_secs': 1.062843} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.858223] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 5203c12e-14a0-4736-8185-8ead9a29b03b/5203c12e-14a0-4736-8185-8ead9a29b03b.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1484.858962] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcc44ba9-3e2c-416e-8642-49c3b562b982 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.871664] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1484.871664] env[63241]: value = "task-1820207" [ 1484.871664] env[63241]: _type = "Task" [ 1484.871664] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.882948] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820207, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.993032] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2dc26530-6454-4b34-972a-8ae28f4922d4 tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.476s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.127114] env[63241]: DEBUG nova.network.neutron [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Successfully updated port: 1e273a71-b8e7-4187-a1cd-c61f52ba3e85 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1485.252309] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820206, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.383881] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820207, 'name': Rename_Task, 'duration_secs': 0.187979} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.383881] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1485.387269] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10a9004c-5c2c-43ca-989a-8a116202c2d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.398774] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1485.398774] env[63241]: value = "task-1820208" [ 1485.398774] env[63241]: _type = "Task" [ 1485.398774] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.411517] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820208, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.575553] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a26b86b-03c4-4882-a3b8-36e01f3cf83a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.585042] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4684fc2-596c-47da-bbd6-347cb9f68d47 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.620366] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86a90da-5f0b-4b3a-993e-b970dbcd92fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.632707] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-e3df56a7-eb82-4297-8aa3-f77c0380b6ec" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.632893] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-e3df56a7-eb82-4297-8aa3-f77c0380b6ec" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.633061] env[63241]: DEBUG nova.network.neutron [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1485.635245] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b080eb9-4e21-4982-a02a-bf49ec735bed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.655354] env[63241]: DEBUG nova.compute.provider_tree [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.756684] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820206, 'name': CreateSnapshot_Task, 'duration_secs': 1.040587} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.756972] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1485.757919] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d61927b-67ff-4be9-b7f5-95cbd2ca76ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.840604] env[63241]: DEBUG nova.compute.manager [req-9367f396-c05f-4418-9306-8f931f0af8fa req-21bf2bd2-bf51-4db0-8764-a7202c9aaa8d service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Received event network-changed-bd89b471-e155-45fc-9b21-40bc75f8f48a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1485.840818] env[63241]: DEBUG nova.compute.manager [req-9367f396-c05f-4418-9306-8f931f0af8fa req-21bf2bd2-bf51-4db0-8764-a7202c9aaa8d service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Refreshing instance network info cache due to event network-changed-bd89b471-e155-45fc-9b21-40bc75f8f48a. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1485.843723] env[63241]: DEBUG oslo_concurrency.lockutils [req-9367f396-c05f-4418-9306-8f931f0af8fa req-21bf2bd2-bf51-4db0-8764-a7202c9aaa8d service nova] Acquiring lock "refresh_cache-72a11582-1fad-428a-bde1-e9d0b05731cd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.843723] env[63241]: DEBUG oslo_concurrency.lockutils [req-9367f396-c05f-4418-9306-8f931f0af8fa req-21bf2bd2-bf51-4db0-8764-a7202c9aaa8d service nova] Acquired lock "refresh_cache-72a11582-1fad-428a-bde1-e9d0b05731cd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.843723] env[63241]: DEBUG nova.network.neutron [req-9367f396-c05f-4418-9306-8f931f0af8fa req-21bf2bd2-bf51-4db0-8764-a7202c9aaa8d service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Refreshing network info cache for port bd89b471-e155-45fc-9b21-40bc75f8f48a {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1485.912995] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820208, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.093398] env[63241]: DEBUG nova.network.neutron [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Successfully updated port: 6c744114-882e-410b-a8fd-10bac7d2be70 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1486.158233] env[63241]: DEBUG nova.scheduler.client.report [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1486.187137] env[63241]: DEBUG nova.network.neutron [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1486.279451] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1486.279790] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-76c83fe7-8a63-4c3b-a75c-c9c8f8be3e77 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.290085] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1486.290085] env[63241]: value = "task-1820209" [ 1486.290085] env[63241]: _type = "Task" [ 1486.290085] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.300785] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820209, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.355842] env[63241]: DEBUG nova.network.neutron [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Updating instance_info_cache with network_info: [{"id": "1e273a71-b8e7-4187-a1cd-c61f52ba3e85", "address": "fa:16:3e:e3:9b:1d", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e273a71-b8", "ovs_interfaceid": "1e273a71-b8e7-4187-a1cd-c61f52ba3e85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.413694] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820208, 'name': PowerOnVM_Task} progress is 88%. 
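The "Updating instance_info_cache with network_info" record above stores a list of VIF dicts: port id, MAC address, subnets with fixed and floating IPs, the OVS interface id, and the NSX segment details. The snippet below pulls the commonly needed fields out of one such entry; the `vif_json` literal is trimmed from the cached record in the log and `summarize_vif` is an illustrative helper, not a Nova function.

```python
import json

# Trimmed copy of one cached VIF entry from the record above.
vif_json = """
{
  "id": "1e273a71-b8e7-4187-a1cd-c61f52ba3e85",
  "address": "fa:16:3e:e3:9b:1d",
  "network": {
    "bridge": "br-int",
    "subnets": [
      {"cidr": "192.168.128.0/28",
       "ips": [{"address": "192.168.128.12", "type": "fixed",
                "floating_ips": []}]}
    ]
  },
  "devname": "tap1e273a71-b8",
  "ovs_interfaceid": "1e273a71-b8e7-4187-a1cd-c61f52ba3e85"
}
"""

def summarize_vif(vif: dict) -> dict:
    """Collect fixed/floating addresses and the OVS interface for one VIF."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return {
        "mac": vif["address"],
        "fixed_ips": fixed,
        "floating_ips": floating,
        "ovs_interfaceid": vif["ovs_interfaceid"],
        "devname": vif["devname"],
    }

if __name__ == "__main__":
    print(summarize_vif(json.loads(vif_json)))
```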
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.598582] env[63241]: DEBUG oslo_concurrency.lockutils [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.598769] env[63241]: DEBUG oslo_concurrency.lockutils [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.599039] env[63241]: DEBUG nova.network.neutron [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1486.665769] env[63241]: DEBUG oslo_concurrency.lockutils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.665769] env[63241]: DEBUG nova.compute.manager [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1486.669160] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.235s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.670725] env[63241]: INFO nova.compute.claims [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1486.806531] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820209, 'name': CloneVM_Task} progress is 94%. 
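The claim on node domain-c8.fc996f14-... above succeeds while the compute_resources lock is held, and it is evaluated against the same inventory the report client keeps publishing for provider 9a5e30eb-... (48 VCPU at a 4.0 allocation ratio, 196590 MB of RAM with 512 reserved, 400 GB of disk). The sketch below does the back-of-the-envelope version of that fit check; the inventory dict is copied from the log, while the usage figures and helper functions are illustrative simplifications of what the resource tracker and Placement actually do.

```python
# Inventory as reported in the log for provider 9a5e30eb-....
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def capacity(inv: dict) -> float:
    """Usable capacity, Placement-style: (total - reserved) * allocation_ratio."""
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

def claim_fits(requested: dict, used: dict) -> bool:
    """Would this request fit on top of the current usage? (simplified)"""
    return all(
        used.get(rc, 0) + amount <= capacity(inventory[rc])
        for rc, amount in requested.items()
    )

if __name__ == "__main__":
    # m1.nano-sized request (1 vCPU, 192 MB, 1 GB root disk per the log).
    flavor_request = {"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1}
    current_usage = {"VCPU": 20, "MEMORY_MB": 4096, "DISK_GB": 40}  # made up
    print(capacity(inventory["VCPU"]))                # 192.0 effective vCPUs
    print(claim_fits(flavor_request, current_usage))  # True -> claim succeeds
```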
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.860587] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-e3df56a7-eb82-4297-8aa3-f77c0380b6ec" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.860587] env[63241]: DEBUG nova.compute.manager [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Instance network_info: |[{"id": "1e273a71-b8e7-4187-a1cd-c61f52ba3e85", "address": "fa:16:3e:e3:9b:1d", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e273a71-b8", "ovs_interfaceid": "1e273a71-b8e7-4187-a1cd-c61f52ba3e85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1486.860587] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:9b:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e273a71-b8e7-4187-a1cd-c61f52ba3e85', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1486.877020] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Creating folder: Project (98582d7ee18145318ee5a05cac36781e). Parent ref: group-v376927. 
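Just before creating the folders, vmops translated the Neutron port into the "Instance VIF info" dict shown above: an OpaqueNetwork reference pointing at the NSX logical switch id, the port's MAC address, and a vmxnet3 adapter model. The mapping is small enough to sketch directly; the field names below are taken from the logged output, while `build_vif_info` itself is an illustrative helper rather than the actual vmops code.

```python
def build_vif_info(vif: dict, vif_model: str = "vmxnet3") -> dict:
    """Translate a Neutron VIF entry into the structure vmops logged above."""
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],          # e.g. 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }

if __name__ == "__main__":
    vif = {
        "id": "1e273a71-b8e7-4187-a1cd-c61f52ba3e85",
        "address": "fa:16:3e:e3:9b:1d",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id":
                    "111a2767-1b06-4fe5-852b-40c9b5a699fd"},
    }
    print(build_vif_info(vif))
```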
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1486.878566] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf5299c6-53a4-42b0-9cd0-e525ade3eaf6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.893256] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Created folder: Project (98582d7ee18145318ee5a05cac36781e) in parent group-v376927. [ 1486.893256] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Creating folder: Instances. Parent ref: group-v377049. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1486.893256] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca685204-bc42-4053-802d-4315153dc259 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.911243] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Created folder: Instances in parent group-v377049. [ 1486.911639] env[63241]: DEBUG oslo.service.loopingcall [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1486.911936] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1486.912618] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9298129-4af8-45e6-91d2-c21e408b632e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.932138] env[63241]: DEBUG oslo_vmware.api [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820208, 'name': PowerOnVM_Task, 'duration_secs': 1.296084} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.933009] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1486.933356] env[63241]: INFO nova.compute.manager [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Took 10.75 seconds to spawn the instance on the hypervisor. 
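The two Folder.CreateFolder calls above lay down the per-tenant hierarchy under the OpenStack root folder (group-v376927): a "Project (<tenant-id>)" folder, then an "Instances" folder inside it, after which CreateVM_Task runs in that Instances folder. The toy model below shows why the flow is naturally idempotent; the in-memory `folders` dict stands in for the vCenter inventory and the moref counter is only there to mimic the group-v37704x references seen in the log.

```python
# Toy in-memory folder tree: parent moref -> {child name: child moref}.
folders: dict[str, dict[str, str]] = {"group-v376927": {}}
_counter = 377048

def create_folder_if_missing(parent_ref: str, name: str) -> str:
    """Reuse an existing child folder or create a new one under parent_ref."""
    global _counter
    children = folders.setdefault(parent_ref, {})
    if name in children:
        return children[name]
    _counter += 1
    ref = f"group-v{_counter}"
    children[name] = ref
    folders[ref] = {}
    print(f"Created folder: {name} in parent {parent_ref}.")
    return ref

if __name__ == "__main__":
    project = create_folder_if_missing(
        "group-v376927", "Project (98582d7ee18145318ee5a05cac36781e)")
    instances = create_folder_if_missing(project, "Instances")
    # A second call is a no-op, which is what makes the flow safe to repeat.
    assert create_folder_if_missing(project, "Instances") == instances
    print(folders)
```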
[ 1486.933667] env[63241]: DEBUG nova.compute.manager [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1486.935208] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c38cb6b-c7e2-4745-8066-88aee05b10a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.939340] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1486.939340] env[63241]: value = "task-1820212" [ 1486.939340] env[63241]: _type = "Task" [ 1486.939340] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.954401] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820212, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.025382] env[63241]: DEBUG nova.network.neutron [req-9367f396-c05f-4418-9306-8f931f0af8fa req-21bf2bd2-bf51-4db0-8764-a7202c9aaa8d service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Updated VIF entry in instance network info cache for port bd89b471-e155-45fc-9b21-40bc75f8f48a. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1487.025869] env[63241]: DEBUG nova.network.neutron [req-9367f396-c05f-4418-9306-8f931f0af8fa req-21bf2bd2-bf51-4db0-8764-a7202c9aaa8d service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Updating instance_info_cache with network_info: [{"id": "bd89b471-e155-45fc-9b21-40bc75f8f48a", "address": "fa:16:3e:6e:46:88", "network": {"id": "9e6ff769-6f81-4d69-9710-49158736659b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1337177874-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d734ba31d184477f9d6cbb2bc3f63706", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd89b471-e1", "ovs_interfaceid": "bd89b471-e155-45fc-9b21-40bc75f8f48a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.101975] env[63241]: DEBUG nova.compute.manager [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Received event network-vif-plugged-6c744114-882e-410b-a8fd-10bac7d2be70 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1487.102205] env[63241]: DEBUG oslo_concurrency.lockutils 
[req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] Acquiring lock "27177719-5090-43de-9bca-6db6bebab7b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.102427] env[63241]: DEBUG oslo_concurrency.lockutils [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] Lock "27177719-5090-43de-9bca-6db6bebab7b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.102578] env[63241]: DEBUG oslo_concurrency.lockutils [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] Lock "27177719-5090-43de-9bca-6db6bebab7b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.102838] env[63241]: DEBUG nova.compute.manager [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] No waiting events found dispatching network-vif-plugged-6c744114-882e-410b-a8fd-10bac7d2be70 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1487.103010] env[63241]: WARNING nova.compute.manager [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Received unexpected event network-vif-plugged-6c744114-882e-410b-a8fd-10bac7d2be70 for instance with vm_state active and task_state None. [ 1487.103219] env[63241]: DEBUG nova.compute.manager [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Received event network-changed-6c744114-882e-410b-a8fd-10bac7d2be70 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1487.103324] env[63241]: DEBUG nova.compute.manager [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Refreshing instance network info cache due to event network-changed-6c744114-882e-410b-a8fd-10bac7d2be70. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1487.103799] env[63241]: DEBUG oslo_concurrency.lockutils [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] Acquiring lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.178751] env[63241]: DEBUG nova.compute.utils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1487.182870] env[63241]: DEBUG nova.compute.manager [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Allocating IP information in the background. 
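The "<uuid>-events" lock, the "No waiting events found dispatching network-vif-plugged-..." message, and the "Received unexpected event" warning above are Nova's external-event rendezvous: a thread plugging a VIF can register interest in network-vif-plugged-<port-id>, and the Neutron-driven notification either wakes that waiter or, when nobody registered (as here, because the interface is being attached to an already-active instance), is logged as unexpected. A condensed stdlib sketch of that rendezvous follows; the class shape and method names are illustrative, not Nova's InstanceEvents API.

```python
import threading

class InstanceEvents:
    """Minimal registry: waiters register for named events, senders pop them."""
    def __init__(self):
        self._lock = threading.Lock()  # plays the role of the '<uuid>-events' lock
        self._events: dict[tuple[str, str], threading.Event] = {}

    def prepare(self, instance_uuid: str, name: str) -> threading.Event:
        with self._lock:
            event = threading.Event()
            self._events[(instance_uuid, name)] = event
            return event

    def pop(self, instance_uuid: str, name: str) -> bool:
        """Dispatch an external event; False means nobody was waiting for it."""
        with self._lock:
            event = self._events.pop((instance_uuid, name), None)
        if event is None:
            print(f"WARNING: unexpected event {name} for instance {instance_uuid}")
            return False
        event.set()
        return True

if __name__ == "__main__":
    events = InstanceEvents()
    uuid = "27177719-5090-43de-9bca-6db6bebab7b4"
    port_event = "network-vif-plugged-6c744114-882e-410b-a8fd-10bac7d2be70"
    # Nobody registered for this port's event -> logged as unexpected.
    events.pop(uuid, port_event)
    # With a waiter registered, the same notification wakes it instead.
    waiter = events.prepare(uuid, port_event)
    events.pop(uuid, port_event)
    print("waiter woken:", waiter.is_set())
```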
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1487.183068] env[63241]: DEBUG nova.network.neutron [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1487.185980] env[63241]: WARNING nova.network.neutron [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] cafd3d43-975a-4836-8948-2f1b47e56666 already exists in list: networks containing: ['cafd3d43-975a-4836-8948-2f1b47e56666']. ignoring it [ 1487.262461] env[63241]: DEBUG nova.policy [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6eee363c054d433dbd93b0bfce5d8432', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33b2cbeab90443c48beaa0b41ba17c1c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1487.307405] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820209, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.454436] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820212, 'name': CreateVM_Task, 'duration_secs': 0.449754} completed successfully. 
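The policy check for network:attach_external_network above fails for the logged credentials, which carry only the reader and member roles. The toy evaluator below shows how a role-based rule turns that context into a pass/fail decision; the "role:admin" rule string is an assumption about the policy default, and `check_role_rule` is not the oslo.policy API, just an illustration of the idea.

```python
def check_role_rule(rule: str, credentials: dict) -> bool:
    """Evaluate a single 'role:<name>' rule against a credentials dict."""
    kind, _, value = rule.partition(":")
    if kind != "role":
        raise ValueError(f"unsupported rule: {rule}")
    return value in credentials.get("roles", [])

if __name__ == "__main__":
    # Credentials as logged for the failed check (trimmed).
    creds = {
        "user_id": "6eee363c054d433dbd93b0bfce5d8432",
        "project_id": "33b2cbeab90443c48beaa0b41ba17c1c",
        "roles": ["reader", "member"],
    }
    # Assuming the policy defaults to an admin-only rule, a member token fails.
    print(check_role_rule("role:admin", creds))   # False -> policy check failed
    print(check_role_rule("role:member", creds))  # True
```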
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.454669] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1487.455427] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.455597] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.455943] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1487.460419] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fae8c440-e02c-4d5f-ad4d-dbca7b795298 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.466860] env[63241]: INFO nova.compute.manager [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Took 40.49 seconds to build instance. [ 1487.469452] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1487.469452] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a7eec4-416b-6fd1-d48c-c11ed77a4e1b" [ 1487.469452] env[63241]: _type = "Task" [ 1487.469452] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.483742] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a7eec4-416b-6fd1-d48c-c11ed77a4e1b, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.484029] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.484561] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1487.484853] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.485219] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.485419] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1487.485893] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27c05222-abf0-4204-8b60-f53f58ce4b0e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.498224] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1487.498891] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1487.499919] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29714bf2-2547-4d4e-88b1-adcefddff49f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.507371] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1487.507371] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52acc947-a01c-51b7-1c89-65f5af67fcbe" [ 1487.507371] env[63241]: _type = "Task" [ 1487.507371] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.516303] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52acc947-a01c-51b7-1c89-65f5af67fcbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.528623] env[63241]: DEBUG oslo_concurrency.lockutils [req-9367f396-c05f-4418-9306-8f931f0af8fa req-21bf2bd2-bf51-4db0-8764-a7202c9aaa8d service nova] Releasing lock "refresh_cache-72a11582-1fad-428a-bde1-e9d0b05731cd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.580829] env[63241]: DEBUG nova.network.neutron [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Successfully created port: 0d383637-3a9e-4430-80c1-4b6b738e5817 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.684081] env[63241]: DEBUG nova.compute.manager [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1487.805821] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820209, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.970741] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8530d65d-65f4-4a8b-8284-dec44b00cc04 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "5203c12e-14a0-4736-8185-8ead9a29b03b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.930s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.983335] env[63241]: DEBUG nova.network.neutron [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updating instance_info_cache with network_info: [{"id": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "address": "fa:16:3e:fd:57:de", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e94bb05-04", "ovs_interfaceid": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6c744114-882e-410b-a8fd-10bac7d2be70", "address": "fa:16:3e:8c:7f:12", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c744114-88", "ovs_interfaceid": "6c744114-882e-410b-a8fd-10bac7d2be70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.019533] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 
tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52acc947-a01c-51b7-1c89-65f5af67fcbe, 'name': SearchDatastore_Task, 'duration_secs': 0.012408} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.024846] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4a6217b-55ef-44f1-bfc0-182f4c94b791 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.033524] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1488.033524] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520bdd45-b294-510e-886c-26c9802ee4b1" [ 1488.033524] env[63241]: _type = "Task" [ 1488.033524] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.048598] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520bdd45-b294-510e-886c-26c9802ee4b1, 'name': SearchDatastore_Task, 'duration_secs': 0.012799} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.049754] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.049754] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec/e3df56a7-eb82-4297-8aa3-f77c0380b6ec.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1488.049985] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4fcde81a-b42b-4f73-a9aa-0310434963b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.067657] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1488.067657] env[63241]: value = "task-1820213" [ 1488.067657] env[63241]: _type = "Task" [ 1488.067657] env[63241]: } to complete. 
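The CopyVirtualDisk_Task above copies the cached image VMDK ([datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk, located by the earlier SearchDatastore calls) into the instance's own directory. The sketch below shows the path bookkeeping and the check-then-populate step behind _fetch_image_if_missing; the `cache` set stands in for the real SearchDatastore/Glance download machinery and the helper names are illustrative.

```python
def cached_vmdk_path(datastore: str, image_id: str) -> str:
    """'[datastore] devstack-image-cache_base/<id>/<id>.vmdk', as in the log."""
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_vmdk_path(datastore: str, instance_uuid: str) -> str:
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

def ensure_cached(image_id: str, cache: set, fetch) -> str:
    """Populate the image cache on first use, then reuse it (simplified)."""
    path = cached_vmdk_path("datastore1", image_id)
    if path not in cache:     # in Nova this is a SearchDatastore_Task lookup
        fetch(path)           # ...followed by a download from Glance
        cache.add(path)
    return path

if __name__ == "__main__":
    cache: set = set()
    src = ensure_cached("e128f8d9-813d-4846-9a6e-b4c4717cd5b4", cache,
                        fetch=lambda p: print("fetching image to", p))
    dst = instance_vmdk_path("datastore1",
                             "e3df56a7-eb82-4297-8aa3-f77c0380b6ec")
    print("CopyVirtualDisk", src, "->", dst)
    # A second boot from the same image skips the fetch entirely.
    ensure_cached("e128f8d9-813d-4846-9a6e-b4c4717cd5b4", cache,
                  fetch=lambda p: print("never called"))
```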
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.078334] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820213, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.193676] env[63241]: INFO nova.virt.block_device [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Booting with volume f8c6db70-e484-49c7-8211-edd49f1c6d75 at /dev/sda [ 1488.261406] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75424964-a456-4515-bd6e-432224cd9e9d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.278470] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e58b78-8e31-4f86-b703-6bebc89b47dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.293673] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff413172-f9fa-46db-acbc-4292ee3987ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.310200] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f538e9-af22-4f36-9fcf-e5a3de09b283 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.316608] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820209, 'name': CloneVM_Task, 'duration_secs': 1.569125} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.334027] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Created linked-clone VM from snapshot [ 1488.334027] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6466e659-5b96-4b65-8cd6-23ee8b95a94d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.336514] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5081b029-10df-4d2d-abd9-9a7b49761569 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.364289] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928cc273-5a10-4da2-92bc-caf81cb07f0a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.371148] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Uploading image 29b41d3f-4ad1-4bd1-9f5e-450efb73d68d {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1488.380607] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a700843c-6046-4301-80df-feb478914921 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.398225] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1249c1-f47d-4561-a94d-ff1900ec4fc7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.408221] env[63241]: DEBUG oslo_vmware.rw_handles [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1488.408221] env[63241]: value = "vm-377048" [ 1488.408221] env[63241]: _type = "VirtualMachine" [ 1488.408221] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1488.408800] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6e25edef-3525-409d-82aa-043c7fffab05 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.435353] env[63241]: DEBUG nova.compute.provider_tree [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.437920] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7033c88d-2032-4512-9a21-8076fe23b65c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.444273] env[63241]: DEBUG oslo_vmware.rw_handles [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lease: (returnval){ [ 1488.444273] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52af2d48-0660-c302-3c4b-db3a43be5c7f" [ 1488.444273] env[63241]: _type = "HttpNfcLease" [ 1488.444273] env[63241]: } obtained for exporting VM: (result){ [ 1488.444273] env[63241]: value = "vm-377048" [ 1488.444273] env[63241]: _type = "VirtualMachine" [ 1488.444273] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1488.444273] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the lease: (returnval){ [ 1488.444273] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52af2d48-0660-c302-3c4b-db3a43be5c7f" [ 1488.444273] env[63241]: _type = "HttpNfcLease" [ 1488.444273] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1488.449128] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0c6e2d-d0ea-4c2a-b294-2c5486d63f58 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.456634] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1488.456634] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52af2d48-0660-c302-3c4b-db3a43be5c7f" [ 1488.456634] env[63241]: _type = "HttpNfcLease" [ 1488.456634] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1488.457176] env[63241]: DEBUG oslo_vmware.rw_handles [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1488.457176] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52af2d48-0660-c302-3c4b-db3a43be5c7f" [ 1488.457176] env[63241]: _type = "HttpNfcLease" [ 1488.457176] env[63241]: }. 
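Uploading the linked clone as image 29b41d3f-... goes through an HttpNfcLease: the driver asks vCenter for an export lease on the VM, waits until the lease is ready, reads the disk's HTTPS URL out of the lease info (the esx7c1n3...disk-0.vmdk URL in the surrounding records), and streams the VMDK while periodically reporting lease progress. The sketch below captures that sequence with stubbed objects; it is a simplified illustration, not the oslo.vmware rw_handles implementation.

```python
import time

class FakeLease:
    """Stub standing in for an HttpNfcLease managed object."""
    def __init__(self, url: str):
        self.state = "initializing"
        self.info = {"deviceUrl": [{"url": url}]}

    def poll(self):
        # The real lease becomes ready once vCenter has prepared the export.
        self.state = "ready"

def wait_for_lease_ready(lease, interval=0.2):
    while lease.state != "ready":
        lease.poll()
        time.sleep(interval)

def find_vmdk_url(lease) -> str:
    """Pick the first device URL from the lease info, as the log shows."""
    return lease.info["deviceUrl"][0]["url"]

def export_disk(lease, read_chunks, progress_cb) -> int:
    """Read the disk in chunks, reporting progress so the lease stays alive."""
    wait_for_lease_ready(lease)
    url = find_vmdk_url(lease)
    done = 0
    for chunk in read_chunks(url):
        done += len(chunk)
        progress_cb(done)
    return done

if __name__ == "__main__":
    lease = FakeLease("https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/"
                      "52ffa4df-d58f-e8c7-d3ad-909d453c8bde/disk-0.vmdk")
    total = export_disk(
        lease,
        read_chunks=lambda url: (b"x" * 1024 for _ in range(4)),  # fake reader
        progress_cb=lambda n: print("HttpNfcLeaseProgress:", n, "bytes read"))
    print("exported", total, "bytes")
```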
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1488.457902] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2a3ec5-6040-4938-a9a2-f3a2de04136e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.465997] env[63241]: DEBUG nova.virt.block_device [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Updating existing volume attachment record: b6ac45ed-08a1-421f-a5bc-d2e199222d7a {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1488.472136] env[63241]: DEBUG oslo_vmware.rw_handles [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffa4df-d58f-e8c7-d3ad-909d453c8bde/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1488.472381] env[63241]: DEBUG oslo_vmware.rw_handles [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffa4df-d58f-e8c7-d3ad-909d453c8bde/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1488.534701] env[63241]: DEBUG oslo_concurrency.lockutils [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.535459] env[63241]: DEBUG oslo_concurrency.lockutils [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.535673] env[63241]: DEBUG oslo_concurrency.lockutils [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.536342] env[63241]: DEBUG oslo_concurrency.lockutils [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] Acquired lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.536542] env[63241]: DEBUG nova.network.neutron [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Refreshing network info cache for port 6c744114-882e-410b-a8fd-10bac7d2be70 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1488.538651] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4fd434-fdc8-4f88-a190-61078f623a67 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.560385] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1488.560754] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1488.560906] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1488.561158] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1488.561341] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1488.561857] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1488.561857] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1488.561857] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1488.562114] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 
tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1488.562501] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1488.562501] env[63241]: DEBUG nova.virt.hardware [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1488.569474] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Reconfiguring VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1488.570708] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efb7b659-5ca8-4716-88c1-c3655d7034cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.595722] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820213, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.596363] env[63241]: DEBUG oslo_vmware.api [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1488.596363] env[63241]: value = "task-1820215" [ 1488.596363] env[63241]: _type = "Task" [ 1488.596363] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.608488] env[63241]: DEBUG oslo_vmware.api [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820215, 'name': ReconfigVM_Task} progress is 0%. 
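The nova.virt.hardware records above walk through CPU-topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits, effectively unbounded socket/core/thread maxima, and a single possible topology of 1 socket x 1 core x 1 thread. The brute-force sketch below reproduces the "possible topologies" step for small inputs; it is a simplified take on the real algorithm, and the reduced maxima in the demo are only there to keep the enumeration tiny.

```python
from itertools import product
from typing import NamedTuple

class VirtCPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus: int, max_sockets: int, max_cores: int,
                        max_threads: int) -> list[VirtCPUTopology]:
    """Enumerate socket/core/thread splits whose product equals vcpus
    (a simplified take on what nova.virt.hardware logs above)."""
    return [VirtCPUTopology(s, c, t)
            for s, c, t in product(range(1, max_sockets + 1),
                                   range(1, max_cores + 1),
                                   range(1, max_threads + 1))
            if s * c * t == vcpus]

if __name__ == "__main__":
    # m1.nano case: 1 vCPU -> only 1 socket x 1 core x 1 thread fits.
    print(possible_topologies(1, 4, 4, 4))
    # A 4-vCPU flavor capped at 2/2/2 has a few valid splits.
    print(possible_topologies(4, 2, 2, 2))
```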
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.630341] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7a672b9c-58e8-4811-8cf9-7e793db26df7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.878549] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquiring lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.878900] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.879206] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquiring lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.879490] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.879676] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.882804] env[63241]: INFO nova.compute.manager [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Terminating instance [ 1488.885128] env[63241]: DEBUG nova.compute.manager [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1488.885535] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1488.886863] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bbfd9f-4010-4036-a405-78d905791155 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.895753] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1488.896204] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77c1e90b-0693-459f-a475-945b612622d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.904992] env[63241]: DEBUG oslo_vmware.api [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1488.904992] env[63241]: value = "task-1820216" [ 1488.904992] env[63241]: _type = "Task" [ 1488.904992] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.927464] env[63241]: DEBUG oslo_vmware.api [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820216, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.942876] env[63241]: DEBUG nova.scheduler.client.report [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1489.838712] env[63241]: DEBUG nova.network.neutron [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Successfully updated port: 0d383637-3a9e-4430-80c1-4b6b738e5817 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1489.840661] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.171s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.843162] env[63241]: DEBUG nova.compute.manager [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Start building networks asynchronously for instance. 
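The "Inventory has not changed for provider ..." entry above reports the resource provider's inventory as one dict per resource class. As a rough sketch of what those numbers mean for scheduling, usable capacity is generally (total - reserved) * allocation_ratio; the values below are copied from the log entry itself, and the formula is an approximation rather than the scheduler's exact code path:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for resource_class, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{resource_class}: {capacity:.0f} schedulable units")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
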
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1489.857496] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.457s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.857496] env[63241]: DEBUG nova.objects.instance [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lazy-loading 'resources' on Instance uuid 2b1805b3-2e03-410f-8222-64b8542d4a43 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1489.859524] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquiring lock "a534b054-2143-41c4-a0fa-028339ecdbbf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.859906] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "a534b054-2143-41c4-a0fa-028339ecdbbf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.873544] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820213, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.738763} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.881937] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec/e3df56a7-eb82-4297-8aa3-f77c0380b6ec.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1489.882889] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1489.882889] env[63241]: DEBUG oslo_vmware.api [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820216, 'name': PowerOffVM_Task, 'duration_secs': 0.319765} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.886060] env[63241]: DEBUG oslo_vmware.api [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820215, 'name': ReconfigVM_Task, 'duration_secs': 0.783582} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.886060] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d582197f-54e6-4f44-9d35-be1de3756e0c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.886212] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1489.886457] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1489.886937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.887356] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Reconfigured VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1489.890657] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f4d854a-1e15-447e-8929-0267cff52c23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.899599] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1489.899599] env[63241]: value = "task-1820217" [ 1489.899599] env[63241]: _type = "Task" [ 1489.899599] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.910585] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820217, 'name': ExtendVirtualDisk_Task} progress is 0%. 
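The task lines above ("Waiting for the task ... progress is N% ... completed successfully" with a duration_secs figure) reflect a poll loop: the vCenter task object is re-read on an interval until it reaches a terminal state. A hedged, generic sketch of such a loop; read_task_info is a hypothetical callback standing in for the property read oslo.vmware performs, and this is not the oslo.vmware implementation:

    import time
    from collections import namedtuple

    TaskInfo = namedtuple("TaskInfo", "state progress result error")

    def wait_for_task(read_task_info, task_ref, poll_interval=0.5):
        # Poll until the task reaches a terminal state, mirroring the
        # "progress is N%" then "completed successfully" lines in the log.
        while True:
            info = read_task_info(task_ref)   # hypothetical reader callback
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error)
            print(f"Task {task_ref!r} progress is {info.progress}%")
            time.sleep(poll_interval)

    # Example with a canned reader that completes on the second poll.
    states = iter([TaskInfo("running", 0, None, None),
                   TaskInfo("success", 100, "done", None)])
    print(wait_for_task(lambda ref: next(states), "task-1820216", poll_interval=0))
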
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.981570] env[63241]: DEBUG nova.compute.manager [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Received event network-vif-plugged-1e273a71-b8e7-4187-a1cd-c61f52ba3e85 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1489.981943] env[63241]: DEBUG oslo_concurrency.lockutils [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] Acquiring lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.982756] env[63241]: DEBUG oslo_concurrency.lockutils [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] Lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.982756] env[63241]: DEBUG oslo_concurrency.lockutils [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] Lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.982878] env[63241]: DEBUG nova.compute.manager [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] No waiting events found dispatching network-vif-plugged-1e273a71-b8e7-4187-a1cd-c61f52ba3e85 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1489.983009] env[63241]: WARNING nova.compute.manager [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Received unexpected event network-vif-plugged-1e273a71-b8e7-4187-a1cd-c61f52ba3e85 for instance with vm_state building and task_state spawning. [ 1489.983290] env[63241]: DEBUG nova.compute.manager [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Received event network-changed-1e273a71-b8e7-4187-a1cd-c61f52ba3e85 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1489.985239] env[63241]: DEBUG nova.compute.manager [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Refreshing instance network info cache due to event network-changed-1e273a71-b8e7-4187-a1cd-c61f52ba3e85. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1489.985239] env[63241]: DEBUG oslo_concurrency.lockutils [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] Acquiring lock "refresh_cache-e3df56a7-eb82-4297-8aa3-f77c0380b6ec" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.985239] env[63241]: DEBUG oslo_concurrency.lockutils [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] Acquired lock "refresh_cache-e3df56a7-eb82-4297-8aa3-f77c0380b6ec" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.985239] env[63241]: DEBUG nova.network.neutron [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Refreshing network info cache for port 1e273a71-b8e7-4187-a1cd-c61f52ba3e85 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1489.990255] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1489.990611] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1489.990889] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Deleting the datastore file [datastore1] 0b7c72e0-79b9-4435-9676-7a0e9afaf936 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1489.991208] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ad216ac-b1a6-4c3c-a2a6-19a2b42997f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.003622] env[63241]: DEBUG oslo_vmware.api [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for the task: (returnval){ [ 1490.003622] env[63241]: value = "task-1820219" [ 1490.003622] env[63241]: _type = "Task" [ 1490.003622] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.014412] env[63241]: DEBUG oslo_vmware.api [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820219, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.207786] env[63241]: DEBUG nova.network.neutron [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updated VIF entry in instance network info cache for port 6c744114-882e-410b-a8fd-10bac7d2be70. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1490.208466] env[63241]: DEBUG nova.network.neutron [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updating instance_info_cache with network_info: [{"id": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "address": "fa:16:3e:fd:57:de", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e94bb05-04", "ovs_interfaceid": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6c744114-882e-410b-a8fd-10bac7d2be70", "address": "fa:16:3e:8c:7f:12", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c744114-88", "ovs_interfaceid": "6c744114-882e-410b-a8fd-10bac7d2be70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.349456] env[63241]: DEBUG nova.compute.utils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1490.351426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Acquiring lock "refresh_cache-9361ee6a-7c4d-4409-bc3c-7da7d4550d97" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.351717] env[63241]: DEBUG oslo_concurrency.lockutils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Acquired lock "refresh_cache-9361ee6a-7c4d-4409-bc3c-7da7d4550d97" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.351913] env[63241]: DEBUG nova.network.neutron [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1490.354196] env[63241]: DEBUG nova.compute.manager [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1490.354474] env[63241]: DEBUG nova.network.neutron [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1490.367652] env[63241]: DEBUG nova.compute.manager [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1490.396463] env[63241]: DEBUG oslo_concurrency.lockutils [None req-55139968-3249-408d-ac49-4abf2c12ddb3 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-27177719-5090-43de-9bca-6db6bebab7b4-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.760s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.412460] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820217, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098915} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.412783] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1490.413682] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f40ad02-b439-4e16-8915-19136afc66c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.442601] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec/e3df56a7-eb82-4297-8aa3-f77c0380b6ec.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1490.445019] env[63241]: DEBUG nova.policy [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f52bf5741a490c83e01e06f686559e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c64d07a686b414f93ec4c599307498f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1490.448152] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03ebcd57-7561-4c79-90fe-aac9b060855f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.473438] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquiring lock "c3c278a8-0513-4a7f-881e-b71c70206860" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.473769] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "c3c278a8-0513-4a7f-881e-b71c70206860" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.477034] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1490.477034] env[63241]: value = 
"task-1820220" [ 1490.477034] env[63241]: _type = "Task" [ 1490.477034] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.490100] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820220, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.517226] env[63241]: DEBUG oslo_vmware.api [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Task: {'id': task-1820219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328767} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.520397] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1490.520723] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1490.520934] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1490.521178] env[63241]: INFO nova.compute.manager [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1490.521471] env[63241]: DEBUG oslo.service.loopingcall [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1490.521982] env[63241]: DEBUG nova.compute.manager [-] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1490.522104] env[63241]: DEBUG nova.network.neutron [-] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1490.714549] env[63241]: DEBUG oslo_concurrency.lockutils [req-b6b129b3-06a2-4eee-ba2d-9d6bd1314d06 req-23ffad96-1032-42fe-8a41-118bfa747e0f service nova] Releasing lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.855924] env[63241]: DEBUG nova.compute.manager [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1490.867649] env[63241]: DEBUG nova.network.neutron [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Updated VIF entry in instance network info cache for port 1e273a71-b8e7-4187-a1cd-c61f52ba3e85. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1490.869209] env[63241]: DEBUG nova.network.neutron [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Updating instance_info_cache with network_info: [{"id": "1e273a71-b8e7-4187-a1cd-c61f52ba3e85", "address": "fa:16:3e:e3:9b:1d", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e273a71-b8", "ovs_interfaceid": "1e273a71-b8e7-4187-a1cd-c61f52ba3e85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.892878] env[63241]: DEBUG nova.network.neutron [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Successfully created port: 94476e06-60a2-4a38-8724-4dadaf22dfa0 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} 
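The instance_info_cache updates above store network_info as a list of VIF dicts, each with its network, subnets, fixed IPs and any attached floating IPs. A small sketch of walking that structure; the field names mirror the log, and the data is a trimmed copy of one VIF from the cache update shown earlier:

    network_info = [{
        "id": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd",
        "address": "fa:16:3e:fd:57:de",
        "network": {
            "label": "tempest-AttachInterfacesTestJSON-1286345479-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.10",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.240", "type": "floating"}],
                }],
            }],
        },
    }]

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floating = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["id"], ip["address"], floating or "-")
    # 2e94bb05-0411-4916-b14d-3c2ebc9dfccd 192.168.128.10 ['10.180.180.240']
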
[ 1490.905787] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.931385] env[63241]: DEBUG nova.network.neutron [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1490.948813] env[63241]: DEBUG nova.compute.manager [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1490.949509] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1490.950219] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1490.950219] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1490.950219] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1490.950219] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1490.950445] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1490.950589] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1490.950759] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1490.952407] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1490.952407] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1490.952407] env[63241]: DEBUG nova.virt.hardware [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1490.955896] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf37f55a-c77a-4cb4-8af1-ca33922e9300 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.972057] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e683afe7-3ef4-4eee-9f43-bdc5814c80c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.981551] env[63241]: DEBUG nova.compute.manager [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1491.010259] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820220, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.068262] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b7b942-e1aa-49f5-9a1a-94263a150c4c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.078278] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963bc4b1-e29e-4df2-89b6-e0d1215ae34f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.118458] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa947cb-1db7-425b-bc2f-55f8ffc9765e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.126628] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a541884-9530-4e21-985e-81e45c4be4da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.145551] env[63241]: DEBUG nova.compute.provider_tree [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.155428] env[63241]: DEBUG nova.compute.manager [req-15918503-889f-468d-b49e-249831fc0c18 req-1751b48e-4b84-4188-b533-280a7bf6c813 service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Received event network-vif-plugged-0d383637-3a9e-4430-80c1-4b6b738e5817 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1491.155428] env[63241]: DEBUG oslo_concurrency.lockutils [req-15918503-889f-468d-b49e-249831fc0c18 req-1751b48e-4b84-4188-b533-280a7bf6c813 service nova] Acquiring lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.155428] env[63241]: DEBUG oslo_concurrency.lockutils [req-15918503-889f-468d-b49e-249831fc0c18 req-1751b48e-4b84-4188-b533-280a7bf6c813 service nova] Lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.155561] env[63241]: DEBUG oslo_concurrency.lockutils [req-15918503-889f-468d-b49e-249831fc0c18 req-1751b48e-4b84-4188-b533-280a7bf6c813 service nova] Lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.155623] env[63241]: DEBUG nova.compute.manager [req-15918503-889f-468d-b49e-249831fc0c18 req-1751b48e-4b84-4188-b533-280a7bf6c813 service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] No waiting events found dispatching network-vif-plugged-0d383637-3a9e-4430-80c1-4b6b738e5817 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1491.156157] 
env[63241]: WARNING nova.compute.manager [req-15918503-889f-468d-b49e-249831fc0c18 req-1751b48e-4b84-4188-b533-280a7bf6c813 service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Received unexpected event network-vif-plugged-0d383637-3a9e-4430-80c1-4b6b738e5817 for instance with vm_state building and task_state spawning. [ 1491.318414] env[63241]: DEBUG nova.network.neutron [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Updating instance_info_cache with network_info: [{"id": "0d383637-3a9e-4430-80c1-4b6b738e5817", "address": "fa:16:3e:66:40:ff", "network": {"id": "6869db06-fa2f-4bd1-85a1-474272099df2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1341715179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33b2cbeab90443c48beaa0b41ba17c1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d383637-3a", "ovs_interfaceid": "0d383637-3a9e-4430-80c1-4b6b738e5817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.375961] env[63241]: DEBUG oslo_concurrency.lockutils [req-e320cfb7-9bf2-4641-8f4e-d699dc93b90b req-5bcb920d-82f3-4762-bfc5-913b8dfb643c service nova] Releasing lock "refresh_cache-e3df56a7-eb82-4297-8aa3-f77c0380b6ec" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.463757] env[63241]: DEBUG nova.network.neutron [-] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.507619] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820220, 'name': ReconfigVM_Task, 'duration_secs': 0.675338} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.508128] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Reconfigured VM instance instance-0000002b to attach disk [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec/e3df56a7-eb82-4297-8aa3-f77c0380b6ec.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1491.509735] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51d8bcb2-58c9-44ab-8f8a-856a7aa9972d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.519676] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1491.519676] env[63241]: value = "task-1820221" [ 1491.519676] env[63241]: _type = "Task" [ 1491.519676] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.530222] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.534932] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820221, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.649991] env[63241]: DEBUG nova.scheduler.client.report [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1491.823669] env[63241]: DEBUG oslo_concurrency.lockutils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Releasing lock "refresh_cache-9361ee6a-7c4d-4409-bc3c-7da7d4550d97" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.824088] env[63241]: DEBUG nova.compute.manager [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Instance network_info: |[{"id": "0d383637-3a9e-4430-80c1-4b6b738e5817", "address": "fa:16:3e:66:40:ff", "network": {"id": "6869db06-fa2f-4bd1-85a1-474272099df2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1341715179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33b2cbeab90443c48beaa0b41ba17c1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d383637-3a", "ovs_interfaceid": "0d383637-3a9e-4430-80c1-4b6b738e5817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1491.824641] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:40:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1ce8361b-fd8e-4971-a37f-b84a4f77db19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d383637-3a9e-4430-80c1-4b6b738e5817', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1491.836108] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Creating folder: Project (33b2cbeab90443c48beaa0b41ba17c1c). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1491.836108] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce0c5880-41ac-45f0-bb4c-7e66780fc24b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.850082] env[63241]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1491.852018] env[63241]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63241) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1491.852018] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Folder already exists: Project (33b2cbeab90443c48beaa0b41ba17c1c). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1491.852018] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Creating folder: Instances. Parent ref: group-v376996. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1491.852018] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b59932e-e16e-4145-a8cf-e4c16b6def1f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.865087] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Created folder: Instances in parent group-v376996. [ 1491.865939] env[63241]: DEBUG oslo.service.loopingcall [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1491.866755] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1491.866755] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f284f764-0e28-442d-88a7-e76731ca3185 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.884797] env[63241]: DEBUG nova.compute.manager [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Start spawning the instance on the hypervisor. 
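The Folder.CreateFolder call above comes back with a DuplicateName SOAP fault (hence the suds warning about an unexpected HTTP 200), and the driver simply reuses the existing folder. A sketch of that create-or-reuse pattern; the exception class and the two vCenter calls are illustrative stand-ins, not the exact Nova or oslo.vmware API:

    class DuplicateName(Exception):
        """Stand-in for the vSphere DuplicateName fault."""

    def ensure_folder(parent_ref, name, create_folder, find_child_folder):
        # Try to create the folder; if it already exists, look it up and reuse
        # it, which is what the "Folder already exists" log line describes.
        try:
            return create_folder(parent_ref, name)
        except DuplicateName:
            return find_child_folder(parent_ref, name)

    # Example with in-memory stand-ins for the two vCenter calls.
    existing = {("group-v376927", "Project (33b2cbeab90443c48beaa0b41ba17c1c)"): "folder-1"}

    def create_folder(parent, name):
        if (parent, name) in existing:
            raise DuplicateName(name)
        existing[(parent, name)] = f"folder-{len(existing) + 1}"
        return existing[(parent, name)]

    def find_child_folder(parent, name):
        return existing[(parent, name)]

    print(ensure_folder("group-v376927",
                        "Project (33b2cbeab90443c48beaa0b41ba17c1c)",
                        create_folder, find_child_folder))   # -> folder-1
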
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1491.897332] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1491.897332] env[63241]: value = "task-1820224" [ 1491.897332] env[63241]: _type = "Task" [ 1491.897332] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.903699] env[63241]: DEBUG oslo_concurrency.lockutils [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "5203c12e-14a0-4736-8185-8ead9a29b03b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.904301] env[63241]: DEBUG oslo_concurrency.lockutils [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "5203c12e-14a0-4736-8185-8ead9a29b03b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.904663] env[63241]: DEBUG oslo_concurrency.lockutils [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "5203c12e-14a0-4736-8185-8ead9a29b03b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.904993] env[63241]: DEBUG oslo_concurrency.lockutils [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "5203c12e-14a0-4736-8185-8ead9a29b03b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.905330] env[63241]: DEBUG oslo_concurrency.lockutils [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "5203c12e-14a0-4736-8185-8ead9a29b03b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.913827] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820224, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.914522] env[63241]: INFO nova.compute.manager [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Terminating instance [ 1491.918518] env[63241]: DEBUG nova.compute.manager [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1491.918774] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1491.920095] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e7f054-0970-4aae-91b7-fc165891e791 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.927140] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1491.927140] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1491.927140] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.927140] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1491.927140] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.927140] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1491.927140] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1491.927460] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1491.927662] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1491.928197] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1491.928197] env[63241]: DEBUG nova.virt.hardware [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1491.929915] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd37c57-4b91-4adc-9808-edb7f82adae8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.942915] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325d019b-bc1b-4c5c-a7c3-38a9cd0ea5ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.948235] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1491.948589] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2998a46-9fc9-4672-afda-fa79392519d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.964482] env[63241]: DEBUG oslo_vmware.api [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1491.964482] env[63241]: value = "task-1820225" [ 1491.964482] env[63241]: _type = "Task" [ 1491.964482] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.969122] env[63241]: INFO nova.compute.manager [-] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Took 1.45 seconds to deallocate network for instance. 
[ 1491.978402] env[63241]: DEBUG oslo_vmware.api [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.033035] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820221, 'name': Rename_Task, 'duration_secs': 0.260294} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.033418] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1492.033710] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f19af94-ebb4-45cb-806c-95f63b85c1e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.041719] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1492.041719] env[63241]: value = "task-1820226" [ 1492.041719] env[63241]: _type = "Task" [ 1492.041719] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.052062] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820226, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.163713] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.307s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.166565] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.376s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.166893] env[63241]: DEBUG nova.objects.instance [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lazy-loading 'resources' on Instance uuid fbbb7682-873d-4bb0-8d39-4aec3566b0af {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1492.185696] env[63241]: INFO nova.scheduler.client.report [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Deleted allocations for instance 2b1805b3-2e03-410f-8222-64b8542d4a43 [ 1492.406020] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820224, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.475928] env[63241]: DEBUG oslo_vmware.api [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820225, 'name': PowerOffVM_Task, 'duration_secs': 0.308067} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.476492] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1492.476798] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1492.477191] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f57fa04d-de2f-4c96-bcbf-f7d818612371 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.479824] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.554253] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820226, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.617249] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1492.617775] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1492.618224] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleting the datastore file [datastore1] 5203c12e-14a0-4736-8185-8ead9a29b03b {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1492.618784] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39f25107-83fe-4e8b-a601-6cde178ea427 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.674695] env[63241]: DEBUG oslo_vmware.api [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1492.674695] env[63241]: value = "task-1820228" [ 1492.674695] env[63241]: _type = "Task" [ 1492.674695] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.685331] env[63241]: DEBUG oslo_vmware.api [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.694277] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bba7dc43-cbde-4d45-ada5-8e7fcf81e9c3 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.912s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.695790] env[63241]: DEBUG oslo_concurrency.lockutils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 26.761s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.696351] env[63241]: DEBUG oslo_concurrency.lockutils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "2b1805b3-2e03-410f-8222-64b8542d4a43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.696684] env[63241]: DEBUG oslo_concurrency.lockutils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.696924] env[63241]: DEBUG oslo_concurrency.lockutils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.702934] env[63241]: INFO nova.compute.manager [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Terminating instance [ 1492.705378] env[63241]: DEBUG oslo_concurrency.lockutils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
1492.705559] env[63241]: DEBUG oslo_concurrency.lockutils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquired lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.705730] env[63241]: DEBUG nova.network.neutron [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1492.861472] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-27177719-5090-43de-9bca-6db6bebab7b4-6c744114-882e-410b-a8fd-10bac7d2be70" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.865019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-27177719-5090-43de-9bca-6db6bebab7b4-6c744114-882e-410b-a8fd-10bac7d2be70" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.891683] env[63241]: DEBUG nova.compute.manager [req-c960b047-4a4b-412a-9914-105d1b4cbb6a req-fd4192b2-2bcd-41b5-a5f5-13782e1d23f5 service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Received event network-vif-plugged-94476e06-60a2-4a38-8724-4dadaf22dfa0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1492.891969] env[63241]: DEBUG oslo_concurrency.lockutils [req-c960b047-4a4b-412a-9914-105d1b4cbb6a req-fd4192b2-2bcd-41b5-a5f5-13782e1d23f5 service nova] Acquiring lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.892236] env[63241]: DEBUG oslo_concurrency.lockutils [req-c960b047-4a4b-412a-9914-105d1b4cbb6a req-fd4192b2-2bcd-41b5-a5f5-13782e1d23f5 service nova] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.892423] env[63241]: DEBUG oslo_concurrency.lockutils [req-c960b047-4a4b-412a-9914-105d1b4cbb6a req-fd4192b2-2bcd-41b5-a5f5-13782e1d23f5 service nova] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.892642] env[63241]: DEBUG nova.compute.manager [req-c960b047-4a4b-412a-9914-105d1b4cbb6a req-fd4192b2-2bcd-41b5-a5f5-13782e1d23f5 service nova] [instance: 
2d1425f2-ddf9-4e82-bcfe-e11c597d011a] No waiting events found dispatching network-vif-plugged-94476e06-60a2-4a38-8724-4dadaf22dfa0 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1492.892844] env[63241]: WARNING nova.compute.manager [req-c960b047-4a4b-412a-9914-105d1b4cbb6a req-fd4192b2-2bcd-41b5-a5f5-13782e1d23f5 service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Received unexpected event network-vif-plugged-94476e06-60a2-4a38-8724-4dadaf22dfa0 for instance with vm_state building and task_state spawning. [ 1492.911372] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820224, 'name': CreateVM_Task, 'duration_secs': 0.576198} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.911595] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1492.912409] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377005', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'name': 'volume-f8c6db70-e484-49c7-8211-edd49f1c6d75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9361ee6a-7c4d-4409-bc3c-7da7d4550d97', 'attached_at': '', 'detached_at': '', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'serial': 'f8c6db70-e484-49c7-8211-edd49f1c6d75'}, 'boot_index': 0, 'attachment_id': 'b6ac45ed-08a1-421f-a5bc-d2e199222d7a', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=63241) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1492.913299] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Root volume attach. 
Driver type: vmdk {{(pid=63241) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1492.919030] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e776cc-9bc3-4384-88ea-941f92ca992d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.929255] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5f4ce2-eb14-45b2-83b2-c31bdfb1ce93 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.945260] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86fd260-e72f-4d74-a9a8-ec198e3b59ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.947731] env[63241]: DEBUG nova.network.neutron [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Successfully updated port: 94476e06-60a2-4a38-8724-4dadaf22dfa0 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1492.958204] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-e9bb983d-dde6-4afa-a956-ee2eea5dede3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.969205] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for the task: (returnval){ [ 1492.969205] env[63241]: value = "task-1820229" [ 1492.969205] env[63241]: _type = "Task" [ 1492.969205] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.982502] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820229, 'name': RelocateVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.057016] env[63241]: DEBUG oslo_vmware.api [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820226, 'name': PowerOnVM_Task, 'duration_secs': 0.782958} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.057401] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1493.057625] env[63241]: INFO nova.compute.manager [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Took 9.33 seconds to spawn the instance on the hypervisor. 
[ 1493.057826] env[63241]: DEBUG nova.compute.manager [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1493.058889] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97aeac6a-f11e-435c-b97c-e0aafd136cb7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.188367] env[63241]: DEBUG oslo_vmware.api [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311296} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.188662] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1493.188852] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1493.189108] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1493.189260] env[63241]: INFO nova.compute.manager [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1493.189526] env[63241]: DEBUG oslo.service.loopingcall [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.189715] env[63241]: DEBUG nova.compute.manager [-] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1493.189808] env[63241]: DEBUG nova.network.neutron [-] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1493.215271] env[63241]: DEBUG nova.compute.utils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Can not refresh info_cache because instance was not found {{(pid=63241) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1493.250172] env[63241]: DEBUG nova.network.neutron [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1493.259174] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7390c7fb-dd42-49c7-bc37-6c86e1572c90 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.270446] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9210792-1de4-4bd4-ad89-8d4b65b6d570 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.302708] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e4bac1-7baa-445d-ae45-ea5f81ea32b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.312332] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de5d8fd-8193-4681-bb62-035c9f7d5e4e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.327023] env[63241]: DEBUG nova.compute.provider_tree [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1493.370025] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.370216] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.371167] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0c93ae-d337-4c98-91ba-cc7db384c7b3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.392026] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df53adc-976c-446f-a404-38229c3de6dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.422732] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Reconfiguring VM to detach interface {{(pid=63241) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1493.423317] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f18c22c-bac6-4366-aae3-1e67746cf734 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.445858] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1493.445858] env[63241]: value = "task-1820230" [ 1493.445858] env[63241]: _type = "Task" [ 1493.445858] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.451146] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-2d1425f2-ddf9-4e82-bcfe-e11c597d011a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.451629] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-2d1425f2-ddf9-4e82-bcfe-e11c597d011a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.451629] env[63241]: DEBUG nova.network.neutron [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1493.457666] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.481827] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820229, 'name': RelocateVM_Task, 'duration_secs': 0.46209} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.482233] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Volume attach. Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1493.482438] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377005', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'name': 'volume-f8c6db70-e484-49c7-8211-edd49f1c6d75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9361ee6a-7c4d-4409-bc3c-7da7d4550d97', 'attached_at': '', 'detached_at': '', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'serial': 'f8c6db70-e484-49c7-8211-edd49f1c6d75'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1493.483354] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9879a43b-3998-4f49-acd0-16d439dfbfc6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.502159] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652d713c-8e78-4a45-be3a-e29b96942942 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.529957] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] volume-f8c6db70-e484-49c7-8211-edd49f1c6d75/volume-f8c6db70-e484-49c7-8211-edd49f1c6d75.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1493.531449] env[63241]: DEBUG nova.network.neutron [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.532822] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e59acdb6-c6f0-416c-9dbd-307249328961 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.556915] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for the task: (returnval){ [ 1493.556915] env[63241]: value = "task-1820231" [ 1493.556915] env[63241]: _type = "Task" [ 1493.556915] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.564894] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820231, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.586649] env[63241]: INFO nova.compute.manager [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Took 37.94 seconds to build instance. [ 1493.599976] env[63241]: DEBUG nova.compute.manager [req-9edc0170-922c-4bb8-ad99-3fb22850c9dc req-e1e2ce5f-332a-4d47-8124-d998b9ff061c service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Received event network-changed-0d383637-3a9e-4430-80c1-4b6b738e5817 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.600202] env[63241]: DEBUG nova.compute.manager [req-9edc0170-922c-4bb8-ad99-3fb22850c9dc req-e1e2ce5f-332a-4d47-8124-d998b9ff061c service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Refreshing instance network info cache due to event network-changed-0d383637-3a9e-4430-80c1-4b6b738e5817. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1493.600449] env[63241]: DEBUG oslo_concurrency.lockutils [req-9edc0170-922c-4bb8-ad99-3fb22850c9dc req-e1e2ce5f-332a-4d47-8124-d998b9ff061c service nova] Acquiring lock "refresh_cache-9361ee6a-7c4d-4409-bc3c-7da7d4550d97" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.600855] env[63241]: DEBUG oslo_concurrency.lockutils [req-9edc0170-922c-4bb8-ad99-3fb22850c9dc req-e1e2ce5f-332a-4d47-8124-d998b9ff061c service nova] Acquired lock "refresh_cache-9361ee6a-7c4d-4409-bc3c-7da7d4550d97" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.601133] env[63241]: DEBUG nova.network.neutron [req-9edc0170-922c-4bb8-ad99-3fb22850c9dc req-e1e2ce5f-332a-4d47-8124-d998b9ff061c service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Refreshing network info cache for port 0d383637-3a9e-4430-80c1-4b6b738e5817 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.830369] env[63241]: DEBUG nova.scheduler.client.report [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1493.961242] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 
14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.012304] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "5060e745-08d0-429e-8780-bfdad7a29f30" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.012304] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "5060e745-08d0-429e-8780-bfdad7a29f30" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.012304] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "5060e745-08d0-429e-8780-bfdad7a29f30-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.012304] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "5060e745-08d0-429e-8780-bfdad7a29f30-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.012304] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "5060e745-08d0-429e-8780-bfdad7a29f30-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.014442] env[63241]: INFO nova.compute.manager [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Terminating instance [ 1494.020964] env[63241]: DEBUG nova.network.neutron [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1494.022399] env[63241]: DEBUG nova.compute.manager [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1494.022580] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1494.024298] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba0599e-9a47-4260-8cb4-1fde3490f2e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.039618] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1494.040077] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69168fed-2810-4f04-b469-6ff585181028 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.052909] env[63241]: DEBUG oslo_concurrency.lockutils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Releasing lock "refresh_cache-2b1805b3-2e03-410f-8222-64b8542d4a43" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.053424] env[63241]: DEBUG nova.compute.manager [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1494.053658] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1494.054198] env[63241]: DEBUG oslo_vmware.api [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1494.054198] env[63241]: value = "task-1820232" [ 1494.054198] env[63241]: _type = "Task" [ 1494.054198] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.054507] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a073c7ed-fa38-4b7a-942f-fabf60bbf709 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.072764] env[63241]: DEBUG oslo_vmware.api [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820232, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.086022] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820231, 'name': ReconfigVM_Task, 'duration_secs': 0.290709} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.087818] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59350c8a-68bc-4902-9bc8-5a63a7491fd7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.101973] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Reconfigured VM instance instance-0000002c to attach disk [datastore1] volume-f8c6db70-e484-49c7-8211-edd49f1c6d75/volume-f8c6db70-e484-49c7-8211-edd49f1c6d75.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1494.110296] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7d0b71ec-73f8-4e29-901e-4b8929201131 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.561s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.112276] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8b3217d-1a84-4bc8-8be7-15350d744706 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.132099] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for the task: (returnval){ [ 1494.132099] env[63241]: value = "task-1820233" [ 1494.132099] env[63241]: _type = "Task" [ 1494.132099] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.152180] env[63241]: WARNING nova.virt.vmwareapi.vmops [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2b1805b3-2e03-410f-8222-64b8542d4a43 could not be found. 
[ 1494.152417] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1494.152610] env[63241]: INFO nova.compute.manager [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Took 0.10 seconds to destroy the instance on the hypervisor. [ 1494.152883] env[63241]: DEBUG oslo.service.loopingcall [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1494.156489] env[63241]: DEBUG nova.compute.manager [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1494.156672] env[63241]: DEBUG nova.network.neutron [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1494.158493] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820233, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.175382] env[63241]: DEBUG nova.network.neutron [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1494.235943] env[63241]: DEBUG nova.network.neutron [-] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.336361] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.170s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.338930] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.402s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.339130] env[63241]: DEBUG nova.objects.instance [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lazy-loading 'resources' on Instance uuid 78894fda-8309-430a-ab38-ce1a415d83d3 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1494.366091] env[63241]: INFO nova.scheduler.client.report [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Deleted allocations for instance fbbb7682-873d-4bb0-8d39-4aec3566b0af [ 1494.420069] env[63241]: DEBUG nova.network.neutron [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Updating instance_info_cache with network_info: [{"id": "94476e06-60a2-4a38-8724-4dadaf22dfa0", "address": "fa:16:3e:09:ab:6e", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94476e06-60", "ovs_interfaceid": "94476e06-60a2-4a38-8724-4dadaf22dfa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.456898] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de 
tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.573585] env[63241]: DEBUG oslo_vmware.api [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820232, 'name': PowerOffVM_Task, 'duration_secs': 0.256349} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.573930] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1494.574096] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1494.574365] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11cfbce8-65d4-45a5-a047-5f49894b3a35 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.584560] env[63241]: DEBUG nova.network.neutron [req-9edc0170-922c-4bb8-ad99-3fb22850c9dc req-e1e2ce5f-332a-4d47-8124-d998b9ff061c service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Updated VIF entry in instance network info cache for port 0d383637-3a9e-4430-80c1-4b6b738e5817. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1494.584922] env[63241]: DEBUG nova.network.neutron [req-9edc0170-922c-4bb8-ad99-3fb22850c9dc req-e1e2ce5f-332a-4d47-8124-d998b9ff061c service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Updating instance_info_cache with network_info: [{"id": "0d383637-3a9e-4430-80c1-4b6b738e5817", "address": "fa:16:3e:66:40:ff", "network": {"id": "6869db06-fa2f-4bd1-85a1-474272099df2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1341715179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33b2cbeab90443c48beaa0b41ba17c1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d383637-3a", "ovs_interfaceid": "0d383637-3a9e-4430-80c1-4b6b738e5817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.642352] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820233, 'name': ReconfigVM_Task, 'duration_secs': 0.197511} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.642671] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377005', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'name': 'volume-f8c6db70-e484-49c7-8211-edd49f1c6d75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9361ee6a-7c4d-4409-bc3c-7da7d4550d97', 'attached_at': '', 'detached_at': '', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'serial': 'f8c6db70-e484-49c7-8211-edd49f1c6d75'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1494.643236] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f5f6182-0eb0-4b8a-8ecb-77e155f37d7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.650726] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for the task: (returnval){ [ 1494.650726] env[63241]: value = "task-1820235" [ 1494.650726] env[63241]: _type = "Task" [ 1494.650726] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.660434] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820235, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.683110] env[63241]: DEBUG nova.network.neutron [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.738902] env[63241]: INFO nova.compute.manager [-] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Took 1.55 seconds to deallocate network for instance. [ 1494.761131] env[63241]: DEBUG oslo_concurrency.lockutils [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "0c72c98b-57f0-44e5-9159-490b27eac3a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.761447] env[63241]: DEBUG oslo_concurrency.lockutils [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "0c72c98b-57f0-44e5-9159-490b27eac3a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.762191] env[63241]: DEBUG oslo_concurrency.lockutils [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "0c72c98b-57f0-44e5-9159-490b27eac3a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.762191] env[63241]: DEBUG oslo_concurrency.lockutils [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "0c72c98b-57f0-44e5-9159-490b27eac3a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.762191] env[63241]: DEBUG oslo_concurrency.lockutils [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "0c72c98b-57f0-44e5-9159-490b27eac3a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.765393] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Unregistered the VM 
{{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1494.765695] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1494.765926] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Deleting the datastore file [datastore1] 5060e745-08d0-429e-8780-bfdad7a29f30 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1494.766703] env[63241]: INFO nova.compute.manager [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Terminating instance [ 1494.768532] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cbd8272b-b32a-4268-a3ab-62042ddf9fee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.772675] env[63241]: DEBUG nova.compute.manager [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1494.772976] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1494.774564] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e06acc-4573-441a-a76b-0851620e5519 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.784836] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1494.787102] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4ad7bda-8b41-4eaa-be46-aaacc64db9f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.789950] env[63241]: DEBUG oslo_vmware.api [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1494.789950] env[63241]: value = "task-1820236" [ 1494.789950] env[63241]: _type = "Task" [ 1494.789950] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.799032] env[63241]: DEBUG oslo_vmware.api [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1494.799032] env[63241]: value = "task-1820237" [ 1494.799032] env[63241]: _type = "Task" [ 1494.799032] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.803516] env[63241]: DEBUG oslo_vmware.api [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820236, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.815802] env[63241]: DEBUG oslo_vmware.api [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.877893] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1917c5b-b157-4d93-ba55-550c531a2716 tempest-ServersAdminNegativeTestJSON-1751779954 tempest-ServersAdminNegativeTestJSON-1751779954-project-member] Lock "fbbb7682-873d-4bb0-8d39-4aec3566b0af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.125s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.923214] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-2d1425f2-ddf9-4e82-bcfe-e11c597d011a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.923624] env[63241]: DEBUG nova.compute.manager [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Instance network_info: |[{"id": "94476e06-60a2-4a38-8724-4dadaf22dfa0", "address": "fa:16:3e:09:ab:6e", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94476e06-60", "ovs_interfaceid": "94476e06-60a2-4a38-8724-4dadaf22dfa0", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1494.924113] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:ab:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dacd109c-2442-41b8-b612-7ed3efbdaa94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94476e06-60a2-4a38-8724-4dadaf22dfa0', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1494.933301] env[63241]: DEBUG oslo.service.loopingcall [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1494.936099] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1494.936807] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e192331f-551b-4066-a9f9-f97614e9df11 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.967805] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.974110] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1494.974110] env[63241]: value = "task-1820238" [ 1494.974110] env[63241]: _type = "Task" [ 1494.974110] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.982561] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820238, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.088860] env[63241]: DEBUG oslo_concurrency.lockutils [req-9edc0170-922c-4bb8-ad99-3fb22850c9dc req-e1e2ce5f-332a-4d47-8124-d998b9ff061c service nova] Releasing lock "refresh_cache-9361ee6a-7c4d-4409-bc3c-7da7d4550d97" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.088860] env[63241]: DEBUG nova.compute.manager [req-9edc0170-922c-4bb8-ad99-3fb22850c9dc req-e1e2ce5f-332a-4d47-8124-d998b9ff061c service nova] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Received event network-vif-deleted-c65a81e1-9494-4ac5-b371-ba4abad9643b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.161783] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820235, 'name': Rename_Task, 'duration_secs': 0.177963} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.164851] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1495.165387] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb5f5e81-0d84-405f-b70a-353b230d9797 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.177298] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for the task: (returnval){ [ 1495.177298] env[63241]: value = "task-1820239" [ 1495.177298] env[63241]: _type = "Task" [ 1495.177298] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.187040] env[63241]: INFO nova.compute.manager [-] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Took 1.03 seconds to deallocate network for instance. [ 1495.188870] env[63241]: DEBUG nova.compute.manager [req-467354f4-c7b2-426d-84eb-d9e3270ea2c0 req-8146ae30-9e2e-4d17-b548-f1dd8f8a3b11 service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Received event network-changed-94476e06-60a2-4a38-8724-4dadaf22dfa0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.189586] env[63241]: DEBUG nova.compute.manager [req-467354f4-c7b2-426d-84eb-d9e3270ea2c0 req-8146ae30-9e2e-4d17-b548-f1dd8f8a3b11 service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Refreshing instance network info cache due to event network-changed-94476e06-60a2-4a38-8724-4dadaf22dfa0. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1495.189586] env[63241]: DEBUG oslo_concurrency.lockutils [req-467354f4-c7b2-426d-84eb-d9e3270ea2c0 req-8146ae30-9e2e-4d17-b548-f1dd8f8a3b11 service nova] Acquiring lock "refresh_cache-2d1425f2-ddf9-4e82-bcfe-e11c597d011a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.189718] env[63241]: DEBUG oslo_concurrency.lockutils [req-467354f4-c7b2-426d-84eb-d9e3270ea2c0 req-8146ae30-9e2e-4d17-b548-f1dd8f8a3b11 service nova] Acquired lock "refresh_cache-2d1425f2-ddf9-4e82-bcfe-e11c597d011a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.189839] env[63241]: DEBUG nova.network.neutron [req-467354f4-c7b2-426d-84eb-d9e3270ea2c0 req-8146ae30-9e2e-4d17-b548-f1dd8f8a3b11 service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Refreshing network info cache for port 94476e06-60a2-4a38-8724-4dadaf22dfa0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1495.210598] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820239, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.246346] env[63241]: DEBUG oslo_concurrency.lockutils [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.312516] env[63241]: DEBUG oslo_vmware.api [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.339071} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.312787] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1495.312959] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1495.313151] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1495.313326] env[63241]: INFO nova.compute.manager [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1495.315022] env[63241]: DEBUG oslo.service.loopingcall [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.315022] env[63241]: DEBUG nova.compute.manager [-] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1495.315022] env[63241]: DEBUG nova.network.neutron [-] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1495.320559] env[63241]: DEBUG oslo_vmware.api [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820237, 'name': PowerOffVM_Task, 'duration_secs': 0.320638} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.321262] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1495.321262] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1495.321539] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34f3c41b-7b60-4177-9cbd-e69710768e36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.450528] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c482576d-da1b-4988-a369-48a9d256366e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.463485] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e6b658-00c1-4743-8134-1613dcf796ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.475930] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.510578] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab66510a-bdc3-4984-87b6-eae98d9d3c35 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.522714] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19e00bc-ac47-43bd-9882-97af2f84b208 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.528072] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820238, 'name': CreateVM_Task, 'duration_secs': 0.524434} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.528072] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1495.529635] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.531969] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.531969] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1495.531969] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4f85e1f-1eb0-48e1-828e-15818a6d6e0f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.540948] env[63241]: DEBUG nova.compute.provider_tree [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1495.547875] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1495.547875] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ba6cc4-de04-de22-fa87-dea32f737f6c" [ 1495.547875] env[63241]: _type = "Task" [ 1495.547875] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.554273] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1495.554517] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1495.554758] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Deleting the datastore file [datastore1] 0c72c98b-57f0-44e5-9159-490b27eac3a6 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1495.555445] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b0e4f0f-0875-46b3-8952-32d72e132aca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.561977] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ba6cc4-de04-de22-fa87-dea32f737f6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.567498] env[63241]: DEBUG oslo_vmware.api [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for the task: (returnval){ [ 1495.567498] env[63241]: value = "task-1820241" [ 1495.567498] env[63241]: _type = "Task" [ 1495.567498] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.576876] env[63241]: DEBUG oslo_vmware.api [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820241, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.681025] env[63241]: INFO nova.compute.manager [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Rebuilding instance [ 1495.698204] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820239, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.704517] env[63241]: INFO nova.compute.manager [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance disappeared during terminate [ 1495.704517] env[63241]: DEBUG oslo_concurrency.lockutils [None req-018d07da-10f6-48f6-9eee-1f94016c6462 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "2b1805b3-2e03-410f-8222-64b8542d4a43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.008s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.775164] env[63241]: DEBUG nova.compute.manager [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1495.777599] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11ded84-1b91-4276-963c-4cd46e1452e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.970673] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.065259] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ba6cc4-de04-de22-fa87-dea32f737f6c, 'name': SearchDatastore_Task, 'duration_secs': 0.014682} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.065259] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.065259] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1496.065259] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.066201] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.066764] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1496.070023] env[63241]: ERROR nova.scheduler.client.report [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [req-0b95c576-d1ae-4872-a59e-56533a627d66] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0b95c576-d1ae-4872-a59e-56533a627d66"}]} [ 1496.070023] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d0dcd0c-5298-4ed3-97d5-9663bdf7a3c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.087434] env[63241]: DEBUG oslo_vmware.api [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Task: {'id': task-1820241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210098} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.087723] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1496.088369] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1496.088369] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1496.088369] env[63241]: INFO nova.compute.manager [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1496.090934] env[63241]: DEBUG oslo.service.loopingcall [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1496.090934] env[63241]: DEBUG nova.compute.manager [-] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1496.090934] env[63241]: DEBUG nova.network.neutron [-] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1496.092347] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1496.092541] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1496.093402] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ac3a476-0c05-4379-9b3e-844f5e53348f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.101290] env[63241]: DEBUG nova.scheduler.client.report [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1496.107052] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1496.107052] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52686b36-a6bb-6a09-e214-a84eb6a6c785" [ 1496.107052] env[63241]: _type = "Task" [ 1496.107052] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.124320] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52686b36-a6bb-6a09-e214-a84eb6a6c785, 'name': SearchDatastore_Task, 'duration_secs': 0.013485} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.131854] env[63241]: DEBUG nova.scheduler.client.report [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1496.132110] env[63241]: DEBUG nova.compute.provider_tree [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1496.135414] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fc5c523-7793-4f38-b5ed-5834912cebd7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.146775] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1496.146775] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52881b76-0c6b-a969-a386-4a722350cb2e" [ 1496.146775] env[63241]: _type = "Task" [ 1496.146775] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.157701] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52881b76-0c6b-a969-a386-4a722350cb2e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.159951] env[63241]: DEBUG nova.scheduler.client.report [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1496.180399] env[63241]: DEBUG nova.scheduler.client.report [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1496.195881] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820239, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.289285] env[63241]: DEBUG nova.network.neutron [req-467354f4-c7b2-426d-84eb-d9e3270ea2c0 req-8146ae30-9e2e-4d17-b548-f1dd8f8a3b11 service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Updated VIF entry in instance network info cache for port 94476e06-60a2-4a38-8724-4dadaf22dfa0. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1496.289684] env[63241]: DEBUG nova.network.neutron [req-467354f4-c7b2-426d-84eb-d9e3270ea2c0 req-8146ae30-9e2e-4d17-b548-f1dd8f8a3b11 service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Updating instance_info_cache with network_info: [{"id": "94476e06-60a2-4a38-8724-4dadaf22dfa0", "address": "fa:16:3e:09:ab:6e", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94476e06-60", "ovs_interfaceid": "94476e06-60a2-4a38-8724-4dadaf22dfa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.301402] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 
e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1496.305092] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aeecb2be-4672-4669-8579-7612a08145f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.316158] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1496.316158] env[63241]: value = "task-1820242" [ 1496.316158] env[63241]: _type = "Task" [ 1496.316158] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.327887] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820242, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.470980] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.636186] env[63241]: DEBUG nova.network.neutron [-] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.660737] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52881b76-0c6b-a969-a386-4a722350cb2e, 'name': SearchDatastore_Task, 'duration_secs': 0.013165} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.662059] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.662356] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 2d1425f2-ddf9-4e82-bcfe-e11c597d011a/2d1425f2-ddf9-4e82-bcfe-e11c597d011a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1496.663094] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81864c91-acfc-49e3-b000-4145ac0d7dbd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.672051] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1496.672051] env[63241]: value = "task-1820243" [ 1496.672051] env[63241]: _type = "Task" [ 1496.672051] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.685877] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.698927] env[63241]: DEBUG oslo_vmware.api [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820239, 'name': PowerOnVM_Task, 'duration_secs': 1.216719} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.699202] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1496.699571] env[63241]: INFO nova.compute.manager [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Took 5.75 seconds to spawn the instance on the hypervisor. 
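[editor's note] The records above (Rename_Task, CopyVirtualDisk_Task, PowerOnVM_Task, the repeated "Waiting for the task: (returnval){ ... } to complete" blocks and the "_poll_task ... progress is N%" lines ending in "completed successfully" with a 'duration_secs') all follow the same wait-for-task rhythm: obtain a vCenter task reference, poll it, log progress, and stop on success or failure. The snippet below is a minimal, self-contained illustration of that polling pattern only; it is not the oslo.vmware implementation, and FakeTask, TaskInfo, get-progress behaviour and the poll interval are hypothetical stand-ins introduced purely for the example.

    # Illustrative sketch of the poll-until-complete pattern seen in the
    # wait_for_task / _poll_task records above. FakeTask and TaskInfo are
    # hypothetical stand-ins, not oslo.vmware classes.
    import time
    from dataclasses import dataclass


    @dataclass
    class TaskInfo:
        state: str        # "running", "success" or "error"
        progress: int     # 0-100, mirrors the "progress is N%" log lines
        error: str = ""


    class FakeTask:
        """Stands in for a vCenter task reference; advances 33% per poll."""

        def __init__(self, name: str) -> None:
            self.name = name
            self._progress = 0

        def info(self) -> TaskInfo:
            self._progress = min(self._progress + 33, 100)
            state = "success" if self._progress >= 100 else "running"
            return TaskInfo(state=state, progress=self._progress)


    def wait_for_task(task: FakeTask, poll_interval: float = 0.5) -> float:
        """Poll a task until it finishes, logging progress like the records above.

        Returns the elapsed time, analogous to the 'duration_secs' value the
        log reports when a task completes successfully.
        """
        started = time.monotonic()
        while True:
            info = task.info()
            if info.state == "running":
                print(f"Task {task.name!r} progress is {info.progress}%.")
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - started
            if info.state == "success":
                print(f"Task {task.name!r} completed successfully in {duration:.3f}s.")
                return duration
            raise RuntimeError(f"Task {task.name!r} failed: {info.error}")


    if __name__ == "__main__":
        wait_for_task(FakeTask("PowerOnVM_Task"), poll_interval=0.1)

Running the sketch prints a few "progress is N%" lines followed by a completion message with an elapsed time, which is the same shape of output the driver records here for each vCenter task. [end editor's note]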
[ 1496.699683] env[63241]: DEBUG nova.compute.manager [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1496.700510] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ad5777-5418-4274-8e70-07aa953b7cf3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.793075] env[63241]: DEBUG oslo_concurrency.lockutils [req-467354f4-c7b2-426d-84eb-d9e3270ea2c0 req-8146ae30-9e2e-4d17-b548-f1dd8f8a3b11 service nova] Releasing lock "refresh_cache-2d1425f2-ddf9-4e82-bcfe-e11c597d011a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.793392] env[63241]: DEBUG nova.compute.manager [req-467354f4-c7b2-426d-84eb-d9e3270ea2c0 req-8146ae30-9e2e-4d17-b548-f1dd8f8a3b11 service nova] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Received event network-vif-deleted-032d2ad1-d0e9-4e9f-9ab4-654170139b7a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1496.797913] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaafd4b1-b5c4-4c46-a2cc-c161a3e80c15 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.806785] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea18e550-742c-4b58-a899-785a39a3028e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.847129] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95dad7d0-ae55-4a33-9eb6-06bb8f1b6a4d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.851383] env[63241]: DEBUG nova.compute.manager [req-1638be51-9550-4040-836e-3e5c9382e9b8 req-19795d7e-5b9b-46e9-af75-3b90750cd52c service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Received event network-vif-deleted-4e927ec6-c091-40ea-8276-55eb762b414d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1496.851657] env[63241]: INFO nova.compute.manager [req-1638be51-9550-4040-836e-3e5c9382e9b8 req-19795d7e-5b9b-46e9-af75-3b90750cd52c service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Neutron deleted interface 4e927ec6-c091-40ea-8276-55eb762b414d; detaching it from the instance and deleting it from the info cache [ 1496.851876] env[63241]: DEBUG nova.network.neutron [req-1638be51-9550-4040-836e-3e5c9382e9b8 req-19795d7e-5b9b-46e9-af75-3b90750cd52c service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.860103] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820242, 'name': PowerOffVM_Task, 'duration_secs': 0.292582} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.862682] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1496.863596] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1496.864717] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff41b91-9ca7-4140-abea-deb5ecb6bcd2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.869238] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbae7de9-2660-4ce6-b016-f56dce2f9f7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.888554] env[63241]: DEBUG nova.compute.provider_tree [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1496.892838] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1496.893393] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d1332b9-d092-48d2-8560-6c7c0da61bd5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.946932] env[63241]: DEBUG nova.network.neutron [-] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.971681] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.100955] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1497.101568] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1497.101760] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleting the datastore file [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1497.101982] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba59d0e5-db4d-4ae4-9a07-2681eabd5cfa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.112548] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1497.112548] env[63241]: value = "task-1820245" [ 1497.112548] env[63241]: _type = "Task" [ 1497.112548] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.125132] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820245, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.140075] env[63241]: INFO nova.compute.manager [-] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Took 1.83 seconds to deallocate network for instance. [ 1497.185353] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820243, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.227036] env[63241]: INFO nova.compute.manager [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Took 38.32 seconds to build instance. 
[ 1497.356877] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-687b8186-5236-4687-b886-144c3d6500c6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.363955] env[63241]: DEBUG nova.compute.manager [req-eb97efb8-1803-40a2-b7ae-d77227f80fb9 req-067c0cbe-945f-4bdf-929d-9b50d1585705 service nova] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Received event network-vif-deleted-34cd3144-d5ff-455a-a20e-09fe6e3896ba {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1497.370792] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8480ef-6b16-42cc-8ae8-7ed97e4c717b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.411974] env[63241]: DEBUG nova.compute.manager [req-1638be51-9550-4040-836e-3e5c9382e9b8 req-19795d7e-5b9b-46e9-af75-3b90750cd52c service nova] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Detach interface failed, port_id=4e927ec6-c091-40ea-8276-55eb762b414d, reason: Instance 0c72c98b-57f0-44e5-9159-490b27eac3a6 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1497.424361] env[63241]: DEBUG nova.scheduler.client.report [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 63 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1497.424617] env[63241]: DEBUG nova.compute.provider_tree [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 63 to 64 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1497.424799] env[63241]: DEBUG nova.compute.provider_tree [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1497.449327] env[63241]: INFO nova.compute.manager [-] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Took 1.36 seconds to deallocate network for instance. 
[ 1497.457354] env[63241]: DEBUG oslo_vmware.rw_handles [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffa4df-d58f-e8c7-d3ad-909d453c8bde/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1497.457791] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f17b8b7-213d-4728-852b-5c335a68eb48 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.468937] env[63241]: DEBUG oslo_vmware.rw_handles [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffa4df-d58f-e8c7-d3ad-909d453c8bde/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1497.469140] env[63241]: ERROR oslo_vmware.rw_handles [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffa4df-d58f-e8c7-d3ad-909d453c8bde/disk-0.vmdk due to incomplete transfer. [ 1497.472708] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3e2af007-49b4-433d-b2e4-d4381aa860ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.477460] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.485884] env[63241]: DEBUG oslo_vmware.rw_handles [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ffa4df-d58f-e8c7-d3ad-909d453c8bde/disk-0.vmdk. 
{{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1497.486117] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Uploaded image 29b41d3f-4ad1-4bd1-9f5e-450efb73d68d to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1497.488776] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1497.489432] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-eb805cc1-2d23-4c7b-884a-092bda09a065 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.497560] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1497.497560] env[63241]: value = "task-1820246" [ 1497.497560] env[63241]: _type = "Task" [ 1497.497560] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.506257] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820246, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.625789] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820245, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.650315] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.651894] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Acquiring lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.652456] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.653314] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Acquiring lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.653314] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.653314] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.655848] env[63241]: INFO nova.compute.manager [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Terminating instance [ 1497.664870] env[63241]: DEBUG nova.compute.manager [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1497.665163] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1497.665426] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-216a1956-3d93-4101-a513-c9e68700de11 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.677156] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for the task: (returnval){ [ 1497.677156] env[63241]: value = "task-1820247" [ 1497.677156] env[63241]: _type = "Task" [ 1497.677156] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.695426] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1820247, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.701246] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.674922} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.702344] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 2d1425f2-ddf9-4e82-bcfe-e11c597d011a/2d1425f2-ddf9-4e82-bcfe-e11c597d011a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1497.702344] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1497.702517] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc76d500-c06a-439c-b205-7760d221ab9d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.712896] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1497.712896] env[63241]: value = "task-1820248" [ 1497.712896] env[63241]: _type = "Task" [ 1497.712896] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.727378] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820248, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.730247] env[63241]: DEBUG oslo_concurrency.lockutils [None req-57cd575c-5407-4a3f-aff7-2619c92e547b tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.528s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.930189] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.591s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.932408] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.715s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.937019] env[63241]: INFO nova.compute.claims [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1497.955981] env[63241]: DEBUG oslo_concurrency.lockutils [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.960244] env[63241]: INFO nova.scheduler.client.report [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Deleted allocations for instance 78894fda-8309-430a-ab38-ce1a415d83d3 [ 1497.985336] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.009957] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820246, 'name': Destroy_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.129322] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820245, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.705681} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.129322] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1498.129322] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1498.129322] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1498.194061] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1820247, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.224509] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820248, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088623} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.224988] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1498.226117] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee310b2b-188e-4f3e-b9b7-41d35a2398a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.254783] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 2d1425f2-ddf9-4e82-bcfe-e11c597d011a/2d1425f2-ddf9-4e82-bcfe-e11c597d011a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1498.255122] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28bbe9ca-cab3-4ab0-a9cf-5106054f2483 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.278811] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1498.278811] env[63241]: value = "task-1820249" [ 1498.278811] env[63241]: _type = "Task" [ 1498.278811] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.288281] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820249, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.474817] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d09a8474-8758-4df4-ae05-7eb08b1098ef tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "78894fda-8309-430a-ab38-ce1a415d83d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.018s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.481972] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.510461] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820246, 'name': Destroy_Task, 'duration_secs': 0.600903} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.511721] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Destroyed the VM [ 1498.511721] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1498.511721] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-09d1fe48-765d-490d-8d69-858a46e58350 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.521367] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1498.521367] env[63241]: value = "task-1820250" [ 1498.521367] env[63241]: _type = "Task" [ 1498.521367] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.532265] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820250, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.700451] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1820247, 'name': PowerOffVM_Task, 'duration_secs': 0.62876} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.700762] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1498.701056] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1498.701180] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-376937', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'name': 'volume-c32b5066-e324-4377-90d0-ef224dd92932', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'serial': 'c32b5066-e324-4377-90d0-ef224dd92932'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1498.702034] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079438a7-e97a-4c08-a137-0399bc06b0e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.726041] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc0e8c8-03e5-4ebb-99c5-311d38b219c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.734530] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c4d592-535d-454b-9e88-79d7345384a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.759413] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c960964-66af-4d2e-9097-a6029d234a43 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.778848] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] The volume has not been displaced from its original location: [datastore1] volume-c32b5066-e324-4377-90d0-ef224dd92932/volume-c32b5066-e324-4377-90d0-ef224dd92932.vmdk. No consolidation needed. {{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1498.784519] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Reconfiguring VM instance instance-00000011 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1498.785454] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e4e3c2c-08b9-4b85-8631-70f3c119fa58 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.810075] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820249, 'name': ReconfigVM_Task, 'duration_secs': 0.275969} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.810927] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 2d1425f2-ddf9-4e82-bcfe-e11c597d011a/2d1425f2-ddf9-4e82-bcfe-e11c597d011a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1498.815234] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09bc6290-1ea2-4b55-bd11-a95169b3dfcf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.817590] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for the task: (returnval){ [ 1498.817590] env[63241]: value = "task-1820251" [ 1498.817590] env[63241]: _type = "Task" [ 1498.817590] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.824656] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1498.824656] env[63241]: value = "task-1820252" [ 1498.824656] env[63241]: _type = "Task" [ 1498.824656] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.833217] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1820251, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.840280] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820252, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.981456] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.033349] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820250, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.174970] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1499.175363] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1499.175775] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1499.175853] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1499.176061] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1499.177037] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1499.177037] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1499.177037] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1499.177212] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1499.177408] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1499.177692] env[63241]: DEBUG nova.virt.hardware [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1499.179126] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfa8d7c-e1e3-476f-acbc-4b9b36f130f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.200886] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e0630a-5240-4495-a7e2-8fed858051de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.223143] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:9b:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e273a71-b8e7-4187-a1cd-c61f52ba3e85', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1499.230595] env[63241]: DEBUG oslo.service.loopingcall [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.233439] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1499.233917] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13102558-9d15-4dd8-8e87-7868adc185c1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.258738] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1499.258738] env[63241]: value = "task-1820253" [ 1499.258738] env[63241]: _type = "Task" [ 1499.258738] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.274278] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820253, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.318412] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.321094] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.321094] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.321094] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.321094] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.325145] env[63241]: INFO nova.compute.manager [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Terminating instance [ 1499.334716] env[63241]: DEBUG nova.compute.manager [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1499.334971] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1499.335305] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1820251, 'name': ReconfigVM_Task, 'duration_secs': 0.245189} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.336059] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5360c6c-ee09-4d22-ace7-e1eae0b7a1fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.342975] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Reconfigured VM instance instance-00000011 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1499.351741] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e12cb75-2126-4ca1-8446-a707178fb36a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.363431] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820252, 'name': Rename_Task, 'duration_secs': 0.215343} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.364604] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1499.365948] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc431f22-18e7-465e-92a9-50e986b3491a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.373682] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1499.377117] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53c3ab03-08a7-41e7-82d2-741d617df76a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.377666] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for the task: (returnval){ [ 1499.377666] env[63241]: value = "task-1820254" [ 1499.377666] env[63241]: _type = "Task" [ 1499.377666] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.382584] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1499.382584] env[63241]: value = "task-1820255" [ 1499.382584] env[63241]: _type = "Task" [ 1499.382584] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.392466] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1820254, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.401056] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820255, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.484082] env[63241]: DEBUG oslo_vmware.api [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820230, 'name': ReconfigVM_Task, 'duration_secs': 5.912954} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.490974] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.491237] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Reconfigured VM to detach interface {{(pid=63241) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1499.502969] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1499.503273] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1499.503425] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleting the datastore file [datastore1] efbe39fa-d581-41ac-b51c-9c94c9839d7a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1499.503719] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f9532d4-ba5c-4d4b-88ef-d8e90bffcc8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.518259] env[63241]: DEBUG oslo_vmware.api [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1499.518259] env[63241]: value = "task-1820257" [ 1499.518259] env[63241]: _type = "Task" [ 1499.518259] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.535034] env[63241]: DEBUG oslo_vmware.api [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820257, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.550432] env[63241]: DEBUG oslo_vmware.api [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820250, 'name': RemoveSnapshot_Task, 'duration_secs': 0.586535} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.562968] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1499.562968] env[63241]: INFO nova.compute.manager [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Took 15.88 seconds to snapshot the instance on the hypervisor. [ 1499.672190] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45419a4d-62b6-4c00-a964-f4132e46ccdb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.682759] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e33ed1-02e9-4c7d-9d5b-ea655036a59f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.730196] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f446be3c-9fac-47d2-8e46-2368f7bca222 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.733339] env[63241]: DEBUG nova.compute.manager [req-e651fe57-7647-4b00-a9b6-5003e3c7beb7 req-7868e2bf-52ad-49bb-8f91-95cd37122b91 service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Received event network-changed-0d383637-3a9e-4430-80c1-4b6b738e5817 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1499.733582] env[63241]: DEBUG nova.compute.manager [req-e651fe57-7647-4b00-a9b6-5003e3c7beb7 req-7868e2bf-52ad-49bb-8f91-95cd37122b91 service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Refreshing instance network info cache due to event network-changed-0d383637-3a9e-4430-80c1-4b6b738e5817. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1499.733729] env[63241]: DEBUG oslo_concurrency.lockutils [req-e651fe57-7647-4b00-a9b6-5003e3c7beb7 req-7868e2bf-52ad-49bb-8f91-95cd37122b91 service nova] Acquiring lock "refresh_cache-9361ee6a-7c4d-4409-bc3c-7da7d4550d97" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.733860] env[63241]: DEBUG oslo_concurrency.lockutils [req-e651fe57-7647-4b00-a9b6-5003e3c7beb7 req-7868e2bf-52ad-49bb-8f91-95cd37122b91 service nova] Acquired lock "refresh_cache-9361ee6a-7c4d-4409-bc3c-7da7d4550d97" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.735016] env[63241]: DEBUG nova.network.neutron [req-e651fe57-7647-4b00-a9b6-5003e3c7beb7 req-7868e2bf-52ad-49bb-8f91-95cd37122b91 service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Refreshing network info cache for port 0d383637-3a9e-4430-80c1-4b6b738e5817 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.746536] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f4e026-09bd-4dd9-b7b0-aaa08f7e1f0d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.766265] env[63241]: DEBUG nova.compute.provider_tree [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.777495] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820253, 'name': CreateVM_Task, 'duration_secs': 0.471361} completed successfully. 
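The req-e651fe57 records show an external "network-changed" event from Neutron triggering a refresh of the instance's network info cache, serialized under a "refresh_cache-<uuid>" lock so concurrent refreshes cannot race. A rough sketch of that pattern with oslo.concurrency; the cache and neutron objects are hypothetical stand-ins, not Nova's actual classes:

from oslo_concurrency import lockutils

def handle_network_changed(instance_uuid, port_id, cache, neutron):
    """Refresh one instance's cached network info after a port event.

    `cache` and `neutron` are made-up stand-ins for Nova's instance info
    cache store and its Neutron client wrapper.
    """
    lock_name = 'refresh_cache-%s' % instance_uuid
    with lockutils.lock(lock_name):
        # Re-query Neutron for the current port state and rewrite the
        # cached network_info, mirroring "Refreshing network info cache
        # for port ..." in the records above.
        nw_info = neutron.list_ports(device_id=instance_uuid)
        cache[instance_uuid] = nw_info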
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.777495] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1499.778182] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.778349] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.778703] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1499.779732] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfb04938-c7d9-45c1-b637-eee80fb235ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.787720] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1499.787720] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a0d53f-84d5-d12d-0a3f-bd584e813d78" [ 1499.787720] env[63241]: _type = "Task" [ 1499.787720] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.798264] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a0d53f-84d5-d12d-0a3f-bd584e813d78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.898260] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820255, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.898526] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1820254, 'name': ReconfigVM_Task, 'duration_secs': 0.179786} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.898830] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-376937', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'name': 'volume-c32b5066-e324-4377-90d0-ef224dd92932', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3', 'attached_at': '', 'detached_at': '', 'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932', 'serial': 'c32b5066-e324-4377-90d0-ef224dd92932'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1499.899136] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1499.899946] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca092405-5713-45bf-9900-da9ccfb1b198 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.907792] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1499.908079] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ac0edb9-d790-4293-8b8d-48c7cb300159 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.035131] env[63241]: DEBUG oslo_vmware.api [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820257, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18488} completed successfully. 
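The "Detached VMDK" record above logs the volume connection info as a plain dict (driver_volume_type plus a nested data block with volume_id, access_mode and so on). A tiny sketch of pulling the interesting fields out of that structure; the literal below copies the shape of the logged dict, trimmed for brevity:

connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-376937',
        'volume_id': 'c32b5066-e324-4377-90d0-ef224dd92932',
        'name': 'volume-c32b5066-e324-4377-90d0-ef224dd92932',
        'access_mode': 'rw',
        'encrypted': False,
    },
    'serial': 'c32b5066-e324-4377-90d0-ef224dd92932',
}

def describe_volume(info):
    """Return a one-line summary of a vmdk-style connection_info dict."""
    data = info.get('data', {})
    return '%s volume %s (%s)' % (
        info.get('driver_volume_type'),
        data.get('volume_id'),
        data.get('access_mode', 'unknown'),
    )

print(describe_volume(connection_info))
# vmdk volume c32b5066-e324-4377-90d0-ef224dd92932 (rw)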
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.036303] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1500.037273] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1500.037749] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1500.038110] env[63241]: INFO nova.compute.manager [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Took 0.70 seconds to destroy the instance on the hypervisor. [ 1500.038706] env[63241]: DEBUG oslo.service.loopingcall [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1500.040204] env[63241]: DEBUG nova.compute.manager [-] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1500.040609] env[63241]: DEBUG nova.network.neutron [-] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1500.045128] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1500.045128] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1500.045263] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Deleting the datastore file [datastore1] c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1500.045887] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ff5af3e-fcd5-46f3-8873-27260876003f 
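Once the instance is destroyed on the hypervisor, the manager hands network teardown to an oslo.service looping call ("Waiting for function ... _deallocate_network_with_retries to return"). A simplified retry wrapper in the same spirit, standard library only; the retry count and interval are invented for the sketch and do not reflect Nova's configuration:

import time

def call_with_retries(func, retries=3, interval=2.0):
    """Call func(), retrying on failure, roughly like a looping call that
    gives up after a fixed number of attempts."""
    last_exc = None
    for attempt in range(1, retries + 1):
        try:
            return func()
        except Exception as exc:  # the real code catches specific errors
            last_exc = exc
            print('deallocate attempt %d failed: %s' % (attempt, exc))
            time.sleep(interval)
    raise last_exc

def deallocate_network(instance_uuid):
    """Hypothetical stand-in for deallocate_for_instance()."""
    print('deallocating network for %s' % instance_uuid)

call_with_retries(lambda: deallocate_network('efbe39fa-d581-41ac-b51c-9c94c9839d7a'))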
{{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.053940] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for the task: (returnval){ [ 1500.053940] env[63241]: value = "task-1820259" [ 1500.053940] env[63241]: _type = "Task" [ 1500.053940] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.070419] env[63241]: DEBUG nova.compute.manager [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Instance disappeared during snapshot {{(pid=63241) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1500.073254] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1820259, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.094016] env[63241]: DEBUG nova.compute.manager [None req-78628b14-71e8-4556-8074-c4489a797d95 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image not found during clean up 29b41d3f-4ad1-4bd1-9f5e-450efb73d68d {{(pid=63241) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4501}} [ 1500.189448] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "7158c64a-5036-419b-b110-7e22c12bf3dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.189861] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "7158c64a-5036-419b-b110-7e22c12bf3dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.211784] env[63241]: DEBUG nova.compute.manager [req-24b2bcdf-e7d4-402f-9874-1ea1b1d7cae6 req-52fb8d1d-5a7c-46d8-9a1d-426c5bef4626 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Received event network-vif-deleted-6c744114-882e-410b-a8fd-10bac7d2be70 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1500.211892] env[63241]: INFO nova.compute.manager [req-24b2bcdf-e7d4-402f-9874-1ea1b1d7cae6 req-52fb8d1d-5a7c-46d8-9a1d-426c5bef4626 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Neutron deleted interface 6c744114-882e-410b-a8fd-10bac7d2be70; detaching it from the instance and deleting it from the info cache [ 1500.212536] env[63241]: DEBUG nova.network.neutron [req-24b2bcdf-e7d4-402f-9874-1ea1b1d7cae6 req-52fb8d1d-5a7c-46d8-9a1d-426c5bef4626 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updating instance_info_cache with network_info: [{"id": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", 
"address": "fa:16:3e:fd:57:de", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e94bb05-04", "ovs_interfaceid": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.274129] env[63241]: DEBUG nova.scheduler.client.report [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1500.305685] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a0d53f-84d5-d12d-0a3f-bd584e813d78, 'name': SearchDatastore_Task, 'duration_secs': 0.012006} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.306013] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.306260] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1500.306510] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.306661] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.306844] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1500.307129] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a1d4f70-8294-4f1f-bca4-fe788d24e3cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.317209] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1500.320962] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1500.320962] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37a98424-f286-4acf-8416-2be70a009b91 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.333512] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1500.333512] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526625ef-bf80-ee3a-1cd5-982c54088ccd" [ 1500.333512] env[63241]: _type = "Task" [ 1500.333512] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.344959] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526625ef-bf80-ee3a-1cd5-982c54088ccd, 'name': SearchDatastore_Task, 'duration_secs': 0.01144} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.345788] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f99c1851-5f66-44a2-83ac-bef34593068a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.352900] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1500.352900] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52682f61-10b6-1f1d-0d92-5131ed95ec00" [ 1500.352900] env[63241]: _type = "Task" [ 1500.352900] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.362130] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52682f61-10b6-1f1d-0d92-5131ed95ec00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.397326] env[63241]: DEBUG oslo_vmware.api [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820255, 'name': PowerOnVM_Task, 'duration_secs': 0.909412} completed successfully. 
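The ServerDiskConfig records walk the image-cache path: take a lock on the cached image folder, run SearchDatastore_Task to see whether the base VMDK is already cached, create the cache directory if it is missing, and only then copy the cached disk into the instance folder (the CopyVirtualDisk_Task a little further down). A condensed sketch of that check-then-copy flow; exists_fn, fetch_fn and copy_fn are hypothetical callables standing in for the datastore search, the image download and the disk copy:

from oslo_concurrency import lockutils

def ensure_cached_image(image_id, datastore, exists_fn, fetch_fn, copy_fn):
    """Make sure the base image is in the datastore cache, then copy it
    to the instance's own folder."""
    cache_path = '[%s] devstack-image-cache_base/%s/%s.vmdk' % (
        datastore, image_id, image_id)
    with lockutils.lock(cache_path):
        if not exists_fn(cache_path):
            # Cache miss: pull the image into the cache folder first.
            fetch_fn(image_id, cache_path)
    # Cache hit (as in the records above) or freshly populated: copy the
    # cached base disk out for this instance.
    return copy_fn(cache_path)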
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.397649] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1500.397887] env[63241]: INFO nova.compute.manager [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Took 8.51 seconds to spawn the instance on the hypervisor. [ 1500.398101] env[63241]: DEBUG nova.compute.manager [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1500.398936] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db750a4d-79c3-48c6-b926-ff9c07338abd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.568065] env[63241]: DEBUG oslo_vmware.api [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Task: {'id': task-1820259, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087576} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.568065] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1500.568065] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1500.568065] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1500.568065] env[63241]: INFO nova.compute.manager [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Took 2.90 seconds to destroy the instance on the hypervisor. [ 1500.568065] env[63241]: DEBUG oslo.service.loopingcall [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1500.568065] env[63241]: DEBUG nova.compute.manager [-] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1500.568065] env[63241]: DEBUG nova.network.neutron [-] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1500.664023] env[63241]: DEBUG nova.network.neutron [req-e651fe57-7647-4b00-a9b6-5003e3c7beb7 req-7868e2bf-52ad-49bb-8f91-95cd37122b91 service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Updated VIF entry in instance network info cache for port 0d383637-3a9e-4430-80c1-4b6b738e5817. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1500.664465] env[63241]: DEBUG nova.network.neutron [req-e651fe57-7647-4b00-a9b6-5003e3c7beb7 req-7868e2bf-52ad-49bb-8f91-95cd37122b91 service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Updating instance_info_cache with network_info: [{"id": "0d383637-3a9e-4430-80c1-4b6b738e5817", "address": "fa:16:3e:66:40:ff", "network": {"id": "6869db06-fa2f-4bd1-85a1-474272099df2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1341715179-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33b2cbeab90443c48beaa0b41ba17c1c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1ce8361b-fd8e-4971-a37f-b84a4f77db19", "external-id": "nsx-vlan-transportzone-255", "segmentation_id": 255, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d383637-3a", "ovs_interfaceid": "0d383637-3a9e-4430-80c1-4b6b738e5817", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.695299] env[63241]: DEBUG nova.compute.manager [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Starting instance... 
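The updated instance_info_cache entry above is a list of VIF dicts, each carrying an "address", a "network" with "subnets", and per-IP "floating_ips". A small sketch that walks that structure to list fixed and floating addresses; the truncated literal mirrors the shape logged above:

network_info = [{
    'id': '0d383637-3a9e-4430-80c1-4b6b738e5817',
    'address': 'fa:16:3e:66:40:ff',
    'network': {
        'subnets': [{
            'cidr': '192.168.128.0/28',
            'ips': [{
                'address': '192.168.128.5',
                'type': 'fixed',
                'floating_ips': [{'address': '10.180.180.216',
                                  'type': 'floating'}],
            }],
        }],
    },
}]

def list_addresses(nw_info):
    """Yield (mac, fixed_ip, [floating_ips]) tuples from a network_info list."""
    for vif in nw_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                floats = [f['address'] for f in ip.get('floating_ips', [])]
                yield vif['address'], ip['address'], floats

for mac, fixed, floats in list_addresses(network_info):
    print(mac, fixed, floats)
# fa:16:3e:66:40:ff 192.168.128.5 ['10.180.180.216']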
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1500.716714] env[63241]: DEBUG oslo_concurrency.lockutils [req-24b2bcdf-e7d4-402f-9874-1ea1b1d7cae6 req-52fb8d1d-5a7c-46d8-9a1d-426c5bef4626 service nova] Acquiring lock "27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.716714] env[63241]: DEBUG oslo_concurrency.lockutils [req-24b2bcdf-e7d4-402f-9874-1ea1b1d7cae6 req-52fb8d1d-5a7c-46d8-9a1d-426c5bef4626 service nova] Acquired lock "27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.717814] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5157c35-97ac-4998-9130-9514d0f894fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.742789] env[63241]: DEBUG oslo_concurrency.lockutils [req-24b2bcdf-e7d4-402f-9874-1ea1b1d7cae6 req-52fb8d1d-5a7c-46d8-9a1d-426c5bef4626 service nova] Releasing lock "27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.745612] env[63241]: WARNING nova.compute.manager [req-24b2bcdf-e7d4-402f-9874-1ea1b1d7cae6 req-52fb8d1d-5a7c-46d8-9a1d-426c5bef4626 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Detach interface failed, port_id=6c744114-882e-410b-a8fd-10bac7d2be70, reason: No device with interface-id 6c744114-882e-410b-a8fd-10bac7d2be70 exists on VM: nova.exception.NotFound: No device with interface-id 6c744114-882e-410b-a8fd-10bac7d2be70 exists on VM [ 1500.778983] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.846s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.779761] env[63241]: DEBUG nova.compute.manager [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1500.784365] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.003s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.784595] env[63241]: DEBUG nova.objects.instance [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lazy-loading 'resources' on Instance uuid 343a7e90-5e55-4125-8475-44050f267987 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1500.864450] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52682f61-10b6-1f1d-0d92-5131ed95ec00, 'name': SearchDatastore_Task, 'duration_secs': 0.009858} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.864735] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.865013] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec/e3df56a7-eb82-4297-8aa3-f77c0380b6ec.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1500.865274] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ee4a0f7-0b2f-40c3-94da-20e58814b4d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.874060] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1500.874060] env[63241]: value = "task-1820260" [ 1500.874060] env[63241]: _type = "Task" [ 1500.874060] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.886177] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820260, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.924473] env[63241]: INFO nova.compute.manager [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Took 38.52 seconds to build instance. [ 1500.974934] env[63241]: DEBUG nova.network.neutron [-] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.981083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.982149] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.982375] env[63241]: DEBUG nova.network.neutron [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1501.167434] env[63241]: DEBUG oslo_concurrency.lockutils [req-e651fe57-7647-4b00-a9b6-5003e3c7beb7 req-7868e2bf-52ad-49bb-8f91-95cd37122b91 service nova] Releasing lock "refresh_cache-9361ee6a-7c4d-4409-bc3c-7da7d4550d97" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.216875] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.288593] env[63241]: DEBUG nova.compute.utils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1501.298688] env[63241]: DEBUG nova.compute.manager [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1501.299008] env[63241]: DEBUG nova.network.neutron [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1501.349107] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "27177719-5090-43de-9bca-6db6bebab7b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.349384] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "27177719-5090-43de-9bca-6db6bebab7b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.349720] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "27177719-5090-43de-9bca-6db6bebab7b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.349950] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "27177719-5090-43de-9bca-6db6bebab7b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.350143] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "27177719-5090-43de-9bca-6db6bebab7b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.356198] env[63241]: INFO nova.compute.manager [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Terminating instance [ 1501.360716] env[63241]: DEBUG nova.compute.manager [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Start destroying the instance on the hypervisor. 
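The req-a3ee9c14 records show the lock discipline around terminate_instance: first the per-instance lock "<uuid>", then the "<uuid>-events" lock held just long enough to clear pending external events, and only then the actual shutdown on the hypervisor. A rough sketch of that ordering; the two callables are hypothetical stand-ins for the event-clearing and shutdown steps:

from oslo_concurrency import lockutils

def do_terminate_instance(instance_uuid, clear_events_fn, shutdown_fn):
    """Illustrative lock ordering for terminate: instance lock first, then
    the '<uuid>-events' lock while pending external events are dropped."""
    with lockutils.lock(instance_uuid):
        with lockutils.lock('%s-events' % instance_uuid):
            # Matches 'clear_events_for_instance' above: held only briefly.
            clear_events_fn(instance_uuid)
        # Power off, unregister and delete datastore contents outside the
        # events lock but still under the instance lock.
        shutdown_fn(instance_uuid)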
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1501.362509] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1501.362509] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea25caff-0bab-473b-93ec-d314c5108d0f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.377704] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1501.384783] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17715f04-773e-47b0-99ca-59c77d1028ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.396940] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820260, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487042} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.398456] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec/e3df56a7-eb82-4297-8aa3-f77c0380b6ec.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1501.398456] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1501.398844] env[63241]: DEBUG oslo_vmware.api [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1501.398844] env[63241]: value = "task-1820261" [ 1501.398844] env[63241]: _type = "Task" [ 1501.398844] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.399187] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e5f7b61-efae-4679-97ca-c26277cea580 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.407969] env[63241]: DEBUG nova.policy [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f5b40d7cf04b3d8702df00367b22a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38c709b68d2a40049d6d4795267987d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1501.420941] env[63241]: DEBUG oslo_vmware.api [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820261, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.420941] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1501.420941] env[63241]: value = "task-1820262" [ 1501.420941] env[63241]: _type = "Task" [ 1501.420941] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.428018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71292284-778a-4a46-9d22-4b89494d9931 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.554s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.433057] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820262, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.482747] env[63241]: INFO nova.compute.manager [-] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Took 1.44 seconds to deallocate network for instance. 
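The nova.policy record just above shows a policy check ("network:attach_external_network") evaluated against the request credentials and failing for a non-admin member. A minimal illustration of that kind of role-based decision in plain Python; the admin-only rule is a guess at the intent of the check, not the policy string Nova actually ships:

def can_attach_external_network(creds):
    """Hypothetical check mirroring the failed policy above: only admins
    may plug an instance into an external network."""
    return creds.get('is_admin', False) or 'admin' in creds.get('roles', [])

creds = {
    'is_admin': False,
    'roles': ['reader', 'member'],
    'project_id': '38c709b68d2a40049d6d4795267987d8',
}
print(can_attach_external_network(creds))  # False, as logged above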
[ 1501.655837] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "e3842404-2c80-4fa9-b0c9-c58c484845a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.656113] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "e3842404-2c80-4fa9-b0c9-c58c484845a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.789180] env[63241]: DEBUG nova.network.neutron [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updating instance_info_cache with network_info: [{"id": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "address": "fa:16:3e:fd:57:de", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e94bb05-04", "ovs_interfaceid": "2e94bb05-0411-4916-b14d-3c2ebc9dfccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.801465] env[63241]: DEBUG nova.compute.manager [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1501.855386] env[63241]: DEBUG nova.network.neutron [-] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.902480] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ea63a5-d0ec-4e01-89d9-c8dcefde9cc4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.915201] env[63241]: DEBUG oslo_vmware.api [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820261, 'name': PowerOffVM_Task, 'duration_secs': 0.235419} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.917230] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1501.917412] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1501.917706] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-816b43b1-89e8-4b01-9149-be675aefadfd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.919907] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1571b133-8cfe-4f15-a17b-c6c9f708da31 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.931937] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820262, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07554} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.956272] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1501.957727] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21fe9be-1466-4750-93dc-7e742b59d7c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.961463] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3466fc4c-0b9a-4db2-9c38-2a02b8c113a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.983379] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1af677-e163-47d2-97b5-8dda4ae34e0e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.003772] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec/e3df56a7-eb82-4297-8aa3-f77c0380b6ec.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1502.004961] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.005275] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-396da94a-e247-45ec-8645-eb28e1947d55 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.021909] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1502.022155] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1502.022339] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleting the datastore file [datastore1] 27177719-5090-43de-9bca-6db6bebab7b4 {{(pid=63241) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1502.022936] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f94b79d-4090-48cc-ac8c-98b5740d18e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.033588] env[63241]: DEBUG nova.compute.provider_tree [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1502.036700] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1502.036700] env[63241]: value = "task-1820264" [ 1502.036700] env[63241]: _type = "Task" [ 1502.036700] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.041873] env[63241]: DEBUG oslo_vmware.api [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1502.041873] env[63241]: value = "task-1820265" [ 1502.041873] env[63241]: _type = "Task" [ 1502.041873] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.046168] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820264, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.056760] env[63241]: DEBUG oslo_vmware.api [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820265, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.163254] env[63241]: DEBUG nova.compute.manager [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1502.281513] env[63241]: DEBUG nova.network.neutron [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Successfully created port: 5727d5d3-d1ae-4830-a899-52c5d7ea9414 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1502.295114] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-27177719-5090-43de-9bca-6db6bebab7b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.359696] env[63241]: INFO nova.compute.manager [-] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Took 1.79 seconds to deallocate network for instance. [ 1502.541194] env[63241]: DEBUG nova.scheduler.client.report [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1502.562185] env[63241]: DEBUG oslo_vmware.api [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205606} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.565437] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1502.565637] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1502.565889] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1502.565987] env[63241]: INFO nova.compute.manager [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1502.566242] env[63241]: DEBUG oslo.service.loopingcall [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1502.566439] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820264, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.567273] env[63241]: DEBUG nova.compute.manager [-] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1502.567373] env[63241]: DEBUG nova.network.neutron [-] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1502.679884] env[63241]: DEBUG nova.compute.manager [req-703319bc-32bb-4293-8c3c-f584b7aa23c0 req-5eeb6f14-7af0-4f98-b97b-c08b0833330b service nova] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Received event network-vif-deleted-6c7369d6-fc45-46eb-8603-bc0f7c519f03 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1502.680203] env[63241]: DEBUG nova.compute.manager [req-703319bc-32bb-4293-8c3c-f584b7aa23c0 req-5eeb6f14-7af0-4f98-b97b-c08b0833330b service nova] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Received event network-vif-deleted-f9f1a2e1-b5a2-4a7d-a67d-fd30131e175e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1502.692312] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.799085] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b3c32521-0590-4f36-9cd7-42b94637f6de tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-27177719-5090-43de-9bca-6db6bebab7b4-6c744114-882e-410b-a8fd-10bac7d2be70" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.937s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.807369] env[63241]: DEBUG nova.compute.manager [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1502.845163] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1502.845566] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1502.845817] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1502.846125] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1502.846365] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1502.846606] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1502.846913] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1502.847166] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1502.847426] env[63241]: DEBUG nova.virt.hardware [None 
req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1502.847672] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1502.847929] env[63241]: DEBUG nova.virt.hardware [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1502.849605] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045c86ba-1e5c-449e-8ac8-50317c90c1a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.860883] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1376a3-f932-4882-8d21-6fe9af3514e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.952324] env[63241]: INFO nova.compute.manager [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Took 0.59 seconds to detach 1 volumes for instance. 
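The "Acquiring lock ...", "Lock ... acquired ...", and "... 'released' ..." DEBUG lines throughout this trace are emitted by oslo.concurrency's lockutils wrapper (lockutils.py:402/407/421), and the "Waiting for the task ... wait_for_task" blocks come from oslo.vmware's task polling (api.py:397/434/444). The following is a minimal, illustrative sketch of that lock pattern only; the function names are placeholders and this is not Nova's actual code, just an example of how callers such as ResourceTracker.update_usage and _locked_do_build_and_run_instance serialize work under a named lock:

    # Illustrative sketch only -- not Nova source. oslo.concurrency logs the
    # "Acquiring lock ... / acquired / released" DEBUG messages seen above
    # each time code enters and leaves a named lock like the ones below.
    # Function names here are placeholders for this example.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Critical section: only one thread in this process holds the
        # "compute_resources" lock at a time, as in the ResourceTracker lines.
        pass


    def locked_build(instance_uuid):
        # Per-instance serialization, as in the
        # _locked_do_build_and_run_instance lines: the lock name is the
        # instance UUID itself.
        with lockutils.lock(instance_uuid):
            pass


    if __name__ == '__main__':
        update_usage()
        locked_build('e3842404-2c80-4fa9-b0c9-c58c484845a2')
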
[ 1502.954888] env[63241]: DEBUG nova.compute.manager [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Deleting volume: c32b5066-e324-4377-90d0-ef224dd92932 {{(pid=63241) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1503.054633] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.270s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.060793] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.553s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.062620] env[63241]: INFO nova.compute.claims [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1503.080384] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820264, 'name': ReconfigVM_Task, 'duration_secs': 0.819359} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.081990] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Reconfigured VM instance instance-0000002b to attach disk [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec/e3df56a7-eb82-4297-8aa3-f77c0380b6ec.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1503.083354] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62197f31-d306-4c1b-b92d-b76e525242ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.093290] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1503.093290] env[63241]: value = "task-1820267" [ 1503.093290] env[63241]: _type = "Task" [ 1503.093290] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.109968] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820267, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.113779] env[63241]: INFO nova.scheduler.client.report [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Deleted allocations for instance 343a7e90-5e55-4125-8475-44050f267987 [ 1503.252304] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "943100f1-e702-4869-8c19-d81d39712ac5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.253752] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "943100f1-e702-4869-8c19-d81d39712ac5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.472718] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.472718] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.501673] env[63241]: DEBUG oslo_concurrency.lockutils [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.501673] env[63241]: DEBUG oslo_concurrency.lockutils [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.504760] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.604694] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820267, 'name': Rename_Task, 'duration_secs': 0.171906} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.604976] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1503.605243] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd011167-7fd3-4c41-8369-96dbae9ac37e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.611940] env[63241]: DEBUG nova.network.neutron [-] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.614299] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1503.614299] env[63241]: value = "task-1820268" [ 1503.614299] env[63241]: _type = "Task" [ 1503.614299] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.622477] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28051f09-b5d0-4d58-97ca-0f78d648a61b tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "343a7e90-5e55-4125-8475-44050f267987" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.833s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.627998] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820268, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.756984] env[63241]: DEBUG nova.compute.manager [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1503.985766] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.986029] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1504.004930] env[63241]: DEBUG nova.compute.utils [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1504.116020] env[63241]: INFO nova.compute.manager [-] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Took 1.55 seconds to deallocate network for instance. [ 1504.137468] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820268, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.162168] env[63241]: DEBUG nova.network.neutron [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Successfully updated port: 5727d5d3-d1ae-4830-a899-52c5d7ea9414 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1504.276616] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.443748] env[63241]: DEBUG nova.compute.manager [req-1f247045-0511-43a5-b84d-9b9a6c9fb8f9 req-4c979e3b-1d4e-4b72-9e1c-176854ec05ca service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Received event network-vif-plugged-5727d5d3-d1ae-4830-a899-52c5d7ea9414 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1504.443955] env[63241]: DEBUG oslo_concurrency.lockutils [req-1f247045-0511-43a5-b84d-9b9a6c9fb8f9 req-4c979e3b-1d4e-4b72-9e1c-176854ec05ca service nova] Acquiring lock "e753da08-d4a5-4f17-85c8-154e843798c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.444094] env[63241]: DEBUG oslo_concurrency.lockutils [req-1f247045-0511-43a5-b84d-9b9a6c9fb8f9 req-4c979e3b-1d4e-4b72-9e1c-176854ec05ca service nova] Lock "e753da08-d4a5-4f17-85c8-154e843798c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.444269] env[63241]: DEBUG oslo_concurrency.lockutils [req-1f247045-0511-43a5-b84d-9b9a6c9fb8f9 req-4c979e3b-1d4e-4b72-9e1c-176854ec05ca 
service nova] Lock "e753da08-d4a5-4f17-85c8-154e843798c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.444436] env[63241]: DEBUG nova.compute.manager [req-1f247045-0511-43a5-b84d-9b9a6c9fb8f9 req-4c979e3b-1d4e-4b72-9e1c-176854ec05ca service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] No waiting events found dispatching network-vif-plugged-5727d5d3-d1ae-4830-a899-52c5d7ea9414 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1504.444600] env[63241]: WARNING nova.compute.manager [req-1f247045-0511-43a5-b84d-9b9a6c9fb8f9 req-4c979e3b-1d4e-4b72-9e1c-176854ec05ca service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Received unexpected event network-vif-plugged-5727d5d3-d1ae-4830-a899-52c5d7ea9414 for instance with vm_state building and task_state spawning. [ 1504.508413] env[63241]: DEBUG oslo_concurrency.lockutils [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.517970] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.517970] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.517970] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1504.574673] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cc0c4a-b04d-406e-bd4c-69c5d3addcdf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.584626] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c650e1-54f8-4e54-943a-d4a03181c1f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.621400] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea0f8aa-5f37-4edd-a82e-1e2d04934eeb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.626127] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.629720] env[63241]: DEBUG oslo_vmware.api [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820268, 'name': PowerOnVM_Task, 'duration_secs': 0.887735} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.631707] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1504.631919] env[63241]: DEBUG nova.compute.manager [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1504.632694] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83841744-57e3-4214-ae6a-68aa51d9d745 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.635917] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70e80e2-5c63-4097-ae21-8e3ef0b65c3d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.656060] env[63241]: DEBUG nova.compute.provider_tree [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1504.664059] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "refresh_cache-e753da08-d4a5-4f17-85c8-154e843798c9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.664211] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "refresh_cache-e753da08-d4a5-4f17-85c8-154e843798c9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.664356] env[63241]: DEBUG nova.network.neutron [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1505.162765] env[63241]: DEBUG nova.scheduler.client.report [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1505.166844] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.193953] env[63241]: DEBUG nova.compute.manager [req-8da85472-3465-4bdb-a2a6-18bf62b626e2 req-47bd5e8b-f4a3-40ef-8a02-f1e0fde40870 service nova] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Received event network-vif-deleted-2e94bb05-0411-4916-b14d-3c2ebc9dfccd {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1505.212472] env[63241]: DEBUG nova.network.neutron [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1505.493494] env[63241]: DEBUG nova.network.neutron [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Updating instance_info_cache with network_info: [{"id": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "address": "fa:16:3e:eb:0e:66", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5727d5d3-d1", "ovs_interfaceid": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.588933] env[63241]: DEBUG oslo_concurrency.lockutils [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.589449] env[63241]: DEBUG oslo_concurrency.lockutils [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.589828] env[63241]: INFO nova.compute.manager [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Attaching volume 037b9a76-8815-4762-9780-c0a21b05b3bd to /dev/sdb [ 1505.629571] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960b88c4-93bd-4ddc-9e66-accf1ed2ec7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.639033] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc2a5d0-e103-4b99-a085-927bc44aee83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.659668] env[63241]: DEBUG nova.virt.block_device [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Updating existing volume attachment record: 8656287c-40d9-4c43-8782-ecdd2abc288c {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1505.668930] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.608s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.669552] env[63241]: DEBUG nova.compute.manager [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1505.672461] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.080s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.672848] env[63241]: DEBUG nova.objects.instance [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lazy-loading 'resources' on Instance uuid 44508cc6-c576-4c30-8559-75118ceba02a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1505.861631] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Updating instance_info_cache with network_info: [{"id": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "address": "fa:16:3e:02:28:3a", "network": {"id": "66538b1c-dfa7-4a9a-84ed-9775e692d300", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1045273516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "06bbbe738ef34806971a4883b7bb3cc4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39b9ee92-fa", "ovs_interfaceid": "39b9ee92-fa8c-4018-be8f-6ad78d44a1a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.996548] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "refresh_cache-e753da08-d4a5-4f17-85c8-154e843798c9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.996970] env[63241]: DEBUG nova.compute.manager [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Instance network_info: |[{"id": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "address": "fa:16:3e:eb:0e:66", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5727d5d3-d1", "ovs_interfaceid": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1505.997506] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:0e:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '329d0e4b-4190-484a-8560-9356dc31beca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5727d5d3-d1ae-4830-a899-52c5d7ea9414', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1506.007325] env[63241]: DEBUG oslo.service.loopingcall [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1506.007325] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1506.007325] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4abb1596-53f0-443b-8f15-5e5b2ec6c2dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.030243] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1506.030243] env[63241]: value = "task-1820270" [ 1506.030243] env[63241]: _type = "Task" [ 1506.030243] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.038377] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820270, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.179210] env[63241]: DEBUG nova.compute.utils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1506.183455] env[63241]: DEBUG nova.compute.manager [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1506.183641] env[63241]: DEBUG nova.network.neutron [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1506.272737] env[63241]: DEBUG nova.policy [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f44f1abb9bdf421ebccad168ae7ec1c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09cb3d8f5a8f4ea9aa7cd73dae3a721d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1506.371815] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-780f3eee-f6c7-4054-8e6e-a370f74dc405" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.371815] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1506.372682] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.372682] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.372682] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.372899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.373427] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.374921] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.375390] env[63241]: INFO nova.compute.manager [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Terminating instance [ 1506.377416] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.377837] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.380867] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.381559] env[63241]: DEBUG nova.compute.manager [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1506.382352] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1506.382352] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.383684] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251ebdcf-c510-416a-99c6-05d1aa24a7d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.386164] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.386308] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1506.386466] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.396534] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1506.399765] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44aa9fd2-7366-44f8-927a-7c75bb47d526 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.410447] env[63241]: DEBUG oslo_vmware.api [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1506.410447] env[63241]: value = "task-1820273" [ 1506.410447] env[63241]: _type = "Task" [ 1506.410447] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.419994] env[63241]: DEBUG oslo_vmware.api [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.549957] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820270, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.586771] env[63241]: DEBUG nova.compute.manager [req-142fe9e7-5528-4dae-910d-90c1b1b7e63b req-f9fb4b56-b7a7-4661-848b-0fa8668ea67c service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Received event network-changed-5727d5d3-d1ae-4830-a899-52c5d7ea9414 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1506.586988] env[63241]: DEBUG nova.compute.manager [req-142fe9e7-5528-4dae-910d-90c1b1b7e63b req-f9fb4b56-b7a7-4661-848b-0fa8668ea67c service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Refreshing instance network info cache due to event network-changed-5727d5d3-d1ae-4830-a899-52c5d7ea9414. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1506.587226] env[63241]: DEBUG oslo_concurrency.lockutils [req-142fe9e7-5528-4dae-910d-90c1b1b7e63b req-f9fb4b56-b7a7-4661-848b-0fa8668ea67c service nova] Acquiring lock "refresh_cache-e753da08-d4a5-4f17-85c8-154e843798c9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.587366] env[63241]: DEBUG oslo_concurrency.lockutils [req-142fe9e7-5528-4dae-910d-90c1b1b7e63b req-f9fb4b56-b7a7-4661-848b-0fa8668ea67c service nova] Acquired lock "refresh_cache-e753da08-d4a5-4f17-85c8-154e843798c9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.587527] env[63241]: DEBUG nova.network.neutron [req-142fe9e7-5528-4dae-910d-90c1b1b7e63b req-f9fb4b56-b7a7-4661-848b-0fa8668ea67c service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Refreshing network info cache for port 5727d5d3-d1ae-4830-a899-52c5d7ea9414 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1506.684112] env[63241]: DEBUG nova.compute.manager [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1506.766383] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d02055a-9670-44f6-a217-590811b52ca9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.780568] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa07adbc-72ee-441f-8674-020b35e6de7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.823976] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7277fbe-ac94-46fe-a7d4-df5fb57484e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.833092] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90986c8e-fe47-43d1-9a96-f54b5837f787 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.848478] env[63241]: DEBUG nova.compute.provider_tree [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1506.881213] env[63241]: DEBUG nova.network.neutron [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Successfully created port: 2f1329d9-5fe9-46cc-817a-c247a1999456 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1506.889587] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.920979] env[63241]: DEBUG oslo_vmware.api [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820273, 'name': PowerOffVM_Task, 'duration_secs': 0.226043} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.921326] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1506.921501] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1506.921760] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6d3c1ce-57e7-4735-932f-1b9458abf121 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.016877] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "34d138e6-90b3-4243-bf45-96ae856cd631" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.017223] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "34d138e6-90b3-4243-bf45-96ae856cd631" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.018553] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1507.018762] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1507.019015] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleting the datastore file [datastore1] e3df56a7-eb82-4297-8aa3-f77c0380b6ec {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1507.019552] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18b1129f-8e87-4f07-a793-37f565b7ea96 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.026733] env[63241]: DEBUG oslo_vmware.api [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 
tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1507.026733] env[63241]: value = "task-1820275" [ 1507.026733] env[63241]: _type = "Task" [ 1507.026733] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.038513] env[63241]: DEBUG oslo_vmware.api [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820275, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.044218] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820270, 'name': CreateVM_Task, 'duration_secs': 0.559591} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.044218] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1507.044881] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.045120] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.045445] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1507.045694] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2a29ee9-6b57-4168-99d2-edbc8875df80 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.050972] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1507.050972] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520cc51d-1489-0e49-070b-d9cb62995fd1" [ 1507.050972] env[63241]: _type = "Task" [ 1507.050972] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.060053] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520cc51d-1489-0e49-070b-d9cb62995fd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.169409] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.169722] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.169973] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.170191] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.170369] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.173936] env[63241]: INFO nova.compute.manager [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Terminating instance [ 1507.175953] env[63241]: DEBUG nova.compute.manager [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1507.176167] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1507.177185] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac2e9d1-7012-4061-b73e-1831e9aeec8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.189173] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1507.192684] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-579e2143-53ff-40ff-8618-fcf0ab34e76d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.201929] env[63241]: DEBUG oslo_vmware.api [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1507.201929] env[63241]: value = "task-1820276" [ 1507.201929] env[63241]: _type = "Task" [ 1507.201929] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.220123] env[63241]: DEBUG oslo_vmware.api [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820276, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.353274] env[63241]: DEBUG nova.scheduler.client.report [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1507.366884] env[63241]: DEBUG nova.network.neutron [req-142fe9e7-5528-4dae-910d-90c1b1b7e63b req-f9fb4b56-b7a7-4661-848b-0fa8668ea67c service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Updated VIF entry in instance network info cache for port 5727d5d3-d1ae-4830-a899-52c5d7ea9414. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1507.368467] env[63241]: DEBUG nova.network.neutron [req-142fe9e7-5528-4dae-910d-90c1b1b7e63b req-f9fb4b56-b7a7-4661-848b-0fa8668ea67c service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Updating instance_info_cache with network_info: [{"id": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "address": "fa:16:3e:eb:0e:66", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5727d5d3-d1", "ovs_interfaceid": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.537781] env[63241]: DEBUG oslo_vmware.api [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820275, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141192} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.538442] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1507.538643] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1507.538852] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1507.539055] env[63241]: INFO nova.compute.manager [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Took 1.16 seconds to destroy the instance on the hypervisor. 
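
The PowerOffVM_Task / DeleteDatastoreFile_Task records above follow oslo.vmware's submit-then-poll pattern: the driver invokes an asynchronous vSphere task and blocks on wait_for_task(), which is what emits the "Waiting for the task ... to complete" and "progress is N%" DEBUG entries. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials and object lookup below are placeholders for illustration, not values taken from this log:

    # Illustrative sketch of the oslo.vmware invoke-then-wait pattern.
    # Host, credentials and the VM lookup are placeholders, not log values.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',          # placeholder vCenter host
        'administrator@vsphere.local',  # placeholder user
        'secret',                       # placeholder password
        10,                             # api_retry_count
        0.5)                            # task_poll_interval in seconds

    # Grab some VirtualMachine managed-object reference to power off.
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'VirtualMachine', 100)
    vm_ref = result.objects[0].obj

    # Submit the asynchronous vSphere task ...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ... and block until it finishes; this call produces the
    # "Waiting for the task ..." / "progress is N%" records seen above.
    session.wait_for_task(task)
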
[ 1507.539309] env[63241]: DEBUG oslo.service.loopingcall [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1507.539497] env[63241]: DEBUG nova.compute.manager [-] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1507.539591] env[63241]: DEBUG nova.network.neutron [-] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1507.561836] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520cc51d-1489-0e49-070b-d9cb62995fd1, 'name': SearchDatastore_Task, 'duration_secs': 0.009968} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.562214] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.562468] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1507.562712] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.562875] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.563067] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1507.563367] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc957b75-2b6a-4e0c-a164-419de9870cbd {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.577450] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1507.577450] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1507.577917] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b31e9db4-1d60-497b-a0cb-da204d571fae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.584376] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1507.584376] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528936d9-d8c7-9c38-83c4-d0ea435ce49b" [ 1507.584376] env[63241]: _type = "Task" [ 1507.584376] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.593215] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528936d9-d8c7-9c38-83c4-d0ea435ce49b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.697046] env[63241]: DEBUG nova.compute.manager [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1507.716478] env[63241]: DEBUG oslo_vmware.api [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820276, 'name': PowerOffVM_Task, 'duration_secs': 0.232857} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.716772] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1507.716972] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1507.718762] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c411281c-01c6-4d18-b33b-6928ee3fe34a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.729063] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1507.729384] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1507.729615] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1507.730215] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1507.730215] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1507.730424] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1507.730811] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1507.731049] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1507.731322] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1507.731543] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1507.731757] env[63241]: DEBUG nova.virt.hardware [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1507.732670] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1e10bd-e2a0-4f2c-8241-40c78b9c29a2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.742855] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e853295-a7ca-413c-8c6c-8d42f281dd3e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.837380] env[63241]: DEBUG nova.compute.manager [req-21ea9ba4-d9c2-4392-b6bd-91e2730c7a9e req-65b79e09-ebf0-4c3f-9cb3-615f6b171ec8 service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Received event network-vif-deleted-1e273a71-b8e7-4187-a1cd-c61f52ba3e85 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1507.837595] env[63241]: INFO nova.compute.manager [req-21ea9ba4-d9c2-4392-b6bd-91e2730c7a9e req-65b79e09-ebf0-4c3f-9cb3-615f6b171ec8 service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Neutron deleted interface 1e273a71-b8e7-4187-a1cd-c61f52ba3e85; detaching it from the instance and deleting it from the info cache [ 1507.837768] env[63241]: DEBUG nova.network.neutron [req-21ea9ba4-d9c2-4392-b6bd-91e2730c7a9e req-65b79e09-ebf0-4c3f-9cb3-615f6b171ec8 service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.858498] env[63241]: DEBUG oslo_concurrency.lockutils 
[None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.186s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.861025] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.432s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.861025] env[63241]: DEBUG nova.objects.instance [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lazy-loading 'resources' on Instance uuid a88ba00d-6644-4ecc-8603-a7d79ce8a4b4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1507.869964] env[63241]: DEBUG oslo_concurrency.lockutils [req-142fe9e7-5528-4dae-910d-90c1b1b7e63b req-f9fb4b56-b7a7-4661-848b-0fa8668ea67c service nova] Releasing lock "refresh_cache-e753da08-d4a5-4f17-85c8-154e843798c9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.880848] env[63241]: INFO nova.scheduler.client.report [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Deleted allocations for instance 44508cc6-c576-4c30-8559-75118ceba02a [ 1508.022746] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1508.022994] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1508.023200] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Deleting the datastore file [datastore1] a1f24cfe-88f0-4e73-9ade-2dcf907848a1 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1508.023470] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-195f30c4-d61c-4483-b5b2-c6cc4178339a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.031567] env[63241]: DEBUG oslo_vmware.api [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for the task: (returnval){ [ 1508.031567] env[63241]: value = "task-1820278" [ 1508.031567] env[63241]: _type = "Task" [ 1508.031567] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.041833] env[63241]: DEBUG oslo_vmware.api [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.103284] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528936d9-d8c7-9c38-83c4-d0ea435ce49b, 'name': SearchDatastore_Task, 'duration_secs': 0.021805} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.104335] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6127596-d4cc-41e6-a44f-769729ad3832 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.111606] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1508.111606] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524d42a1-0531-3cf8-fb6a-a111230ef9ce" [ 1508.111606] env[63241]: _type = "Task" [ 1508.111606] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.121057] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524d42a1-0531-3cf8-fb6a-a111230ef9ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.293066] env[63241]: DEBUG nova.network.neutron [-] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.341045] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f165a08e-c588-4e45-912e-ea92099134d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.356033] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335587cf-c654-4a26-b9b5-f5e2b5fa7869 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.388026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cac55ba2-af27-4cfe-9473-4c324322f810 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "44508cc6-c576-4c30-8559-75118ceba02a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.086s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.400765] env[63241]: DEBUG nova.compute.manager [req-21ea9ba4-d9c2-4392-b6bd-91e2730c7a9e req-65b79e09-ebf0-4c3f-9cb3-615f6b171ec8 service nova] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Detach interface failed, port_id=1e273a71-b8e7-4187-a1cd-c61f52ba3e85, reason: Instance e3df56a7-eb82-4297-8aa3-f77c0380b6ec could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1508.545104] env[63241]: DEBUG oslo_vmware.api [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Task: {'id': task-1820278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.229206} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.545423] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1508.545612] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1508.545786] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1508.545953] env[63241]: INFO nova.compute.manager [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Took 1.37 seconds to destroy the instance on the hypervisor. [ 1508.546211] env[63241]: DEBUG oslo.service.loopingcall [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1508.546399] env[63241]: DEBUG nova.compute.manager [-] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1508.546495] env[63241]: DEBUG nova.network.neutron [-] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1508.630639] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524d42a1-0531-3cf8-fb6a-a111230ef9ce, 'name': SearchDatastore_Task, 'duration_secs': 0.014017} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.632029] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.632309] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e753da08-d4a5-4f17-85c8-154e843798c9/e753da08-d4a5-4f17-85c8-154e843798c9.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1508.632585] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6cc8ff5-1d7e-4e6f-a219-9930797c5281 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.643756] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1508.643756] env[63241]: value = "task-1820280" [ 1508.643756] env[63241]: _type = "Task" [ 1508.643756] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.655366] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.795057] env[63241]: INFO nova.compute.manager [-] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Took 1.26 seconds to deallocate network for instance. 
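
The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entries above come from oslo.service's retry machinery wrapping the network deallocation. A small, self-contained sketch of that retry-decorator pattern, assuming only the oslo.service library; the decorated function and exception type are stand-ins, not Nova's actual _deallocate_network_with_retries:

    # Illustrative sketch of the oslo.service retry pattern behind the
    # "Waiting for function ... to return" records. The function and the
    # exception type are stand-ins chosen for the example.
    from oslo_service import loopingcall


    class TransientError(Exception):
        """Stand-in for a retryable failure (e.g. a Neutron connection error)."""


    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=12,
                                exceptions=(TransientError,))
    def deallocate_network_with_retries():
        # Real code would deallocate ports here; raising TransientError
        # would trigger another attempt with increasing back-off.
        print('deallocating network resources')


    deallocate_network_with_retries()
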
[ 1508.830079] env[63241]: DEBUG nova.compute.manager [req-ad8d1bbb-c45d-40fa-aed4-1e82b3f65844 req-5a38f37b-7e72-4438-b797-89b8d587b9a0 service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Received event network-vif-plugged-2f1329d9-5fe9-46cc-817a-c247a1999456 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1508.830079] env[63241]: DEBUG oslo_concurrency.lockutils [req-ad8d1bbb-c45d-40fa-aed4-1e82b3f65844 req-5a38f37b-7e72-4438-b797-89b8d587b9a0 service nova] Acquiring lock "eaed706d-b3db-46ed-8c70-08f80479afa4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.830079] env[63241]: DEBUG oslo_concurrency.lockutils [req-ad8d1bbb-c45d-40fa-aed4-1e82b3f65844 req-5a38f37b-7e72-4438-b797-89b8d587b9a0 service nova] Lock "eaed706d-b3db-46ed-8c70-08f80479afa4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.830079] env[63241]: DEBUG oslo_concurrency.lockutils [req-ad8d1bbb-c45d-40fa-aed4-1e82b3f65844 req-5a38f37b-7e72-4438-b797-89b8d587b9a0 service nova] Lock "eaed706d-b3db-46ed-8c70-08f80479afa4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.830079] env[63241]: DEBUG nova.compute.manager [req-ad8d1bbb-c45d-40fa-aed4-1e82b3f65844 req-5a38f37b-7e72-4438-b797-89b8d587b9a0 service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] No waiting events found dispatching network-vif-plugged-2f1329d9-5fe9-46cc-817a-c247a1999456 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1508.830664] env[63241]: WARNING nova.compute.manager [req-ad8d1bbb-c45d-40fa-aed4-1e82b3f65844 req-5a38f37b-7e72-4438-b797-89b8d587b9a0 service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Received unexpected event network-vif-plugged-2f1329d9-5fe9-46cc-817a-c247a1999456 for instance with vm_state building and task_state spawning. 
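
The 'Acquiring lock' / 'Lock "..." acquired by ... waited Ns' / '"released" ... held Ns' trace above is oslo.concurrency's lockutils serializing work per instance UUID (and per "<uuid>-events" for the event dispatcher). A minimal sketch of the two usual forms, decorator and context manager, using example lock names rather than anything from this log:

    # Illustrative sketch of the oslo.concurrency locking that produces the
    # acquire/release DEBUG records above. Lock names here are examples.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('example-instance-uuid', external=False)
    def do_terminate_instance():
        # Runs with the named in-process lock held; lockutils logs the
        # acquire, the time spent waiting, and the release at DEBUG level.
        print('terminating instance while holding the lock')


    # The same locks can also be taken explicitly as a context manager,
    # mirroring the "<uuid>-events" locks used when popping instance events.
    with lockutils.lock('example-instance-uuid-events'):
        print('clearing instance events under the lock')

    do_terminate_instance()
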
[ 1508.895212] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab97b10-89ff-4b79-b63f-0557cf63b2d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.905544] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b493f6-14f8-4999-a670-e3e6913889d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.940900] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b7d4f6-a498-4a37-b7c3-3f7a11ed2576 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.949434] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec2e24f-5d0e-400d-8625-fb20f1e6d799 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.965347] env[63241]: DEBUG nova.compute.provider_tree [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1509.156549] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820280, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.162753] env[63241]: DEBUG nova.network.neutron [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Successfully updated port: 2f1329d9-5fe9-46cc-817a-c247a1999456 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1509.302279] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.367808] env[63241]: DEBUG nova.network.neutron [-] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.469049] env[63241]: DEBUG nova.scheduler.client.report [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1509.659583] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.797373} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.659931] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e753da08-d4a5-4f17-85c8-154e843798c9/e753da08-d4a5-4f17-85c8-154e843798c9.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1509.660183] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1509.660535] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3714f8e-dbfa-4361-843b-4a57170a3d37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.665516] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquiring lock "refresh_cache-eaed706d-b3db-46ed-8c70-08f80479afa4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.665516] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquired lock "refresh_cache-eaed706d-b3db-46ed-8c70-08f80479afa4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.665761] env[63241]: DEBUG nova.network.neutron [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1509.670220] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1509.670220] env[63241]: value = "task-1820281" [ 1509.670220] env[63241]: _type = "Task" [ 1509.670220] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.679945] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820281, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.871058] env[63241]: INFO nova.compute.manager [-] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Took 1.32 seconds to deallocate network for instance. [ 1509.881414] env[63241]: DEBUG nova.compute.manager [req-7dd0f489-939c-4ea1-9a67-68070c6d16c6 req-5b2d80c2-6f54-475c-969a-a2a1fad5c558 service nova] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Received event network-vif-deleted-61bceac0-2e58-4bc3-92f6-c421aabdfc8b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1509.973471] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.113s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.976435] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.114s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.977719] env[63241]: INFO nova.compute.claims [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1509.997134] env[63241]: INFO nova.scheduler.client.report [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Deleted allocations for instance a88ba00d-6644-4ecc-8603-a7d79ce8a4b4 [ 1510.180409] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820281, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069755} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.180688] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1510.181583] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d309a502-c3b5-4e32-a558-d9facb32d50d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.205111] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] e753da08-d4a5-4f17-85c8-154e843798c9/e753da08-d4a5-4f17-85c8-154e843798c9.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1510.205465] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ea27ac3-c4d5-47b5-b5e7-24014e235e6f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.227283] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1510.227283] env[63241]: value = "task-1820282" [ 1510.227283] env[63241]: _type = "Task" [ 1510.227283] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.230912] env[63241]: DEBUG nova.network.neutron [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1510.238718] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820282, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.378477] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.452843] env[63241]: DEBUG nova.network.neutron [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Updating instance_info_cache with network_info: [{"id": "2f1329d9-5fe9-46cc-817a-c247a1999456", "address": "fa:16:3e:54:0b:7d", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.169", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f1329d9-5f", "ovs_interfaceid": "2f1329d9-5fe9-46cc-817a-c247a1999456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.504843] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3268d045-6de1-4064-9947-a1e91c7e7432 tempest-ServersTestJSON-900891466 tempest-ServersTestJSON-900891466-project-member] Lock "a88ba00d-6644-4ecc-8603-a7d79ce8a4b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.046s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.716451] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1510.716681] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377058', 'volume_id': '037b9a76-8815-4762-9780-c0a21b05b3bd', 'name': 'volume-037b9a76-8815-4762-9780-c0a21b05b3bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2d1425f2-ddf9-4e82-bcfe-e11c597d011a', 'attached_at': '', 'detached_at': '', 'volume_id': '037b9a76-8815-4762-9780-c0a21b05b3bd', 'serial': '037b9a76-8815-4762-9780-c0a21b05b3bd'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1510.717645] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e8dba2-cc18-4cf3-a013-4174c2aa0267 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.739487] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f56b32-d79c-43da-a8bc-dff0154caea1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.747765] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820282, 'name': ReconfigVM_Task, 'duration_secs': 0.376239} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.762914] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Reconfigured VM instance instance-0000002e to attach disk [datastore1] e753da08-d4a5-4f17-85c8-154e843798c9/e753da08-d4a5-4f17-85c8-154e843798c9.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1510.771621] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] volume-037b9a76-8815-4762-9780-c0a21b05b3bd/volume-037b9a76-8815-4762-9780-c0a21b05b3bd.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1510.771916] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89f52f7b-0e6f-4ff5-bbdc-c703e6b7d7e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.773802] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce9dd0f0-27b1-49b9-b8be-14a3808afe32 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.794095] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1510.794095] env[63241]: value = "task-1820283" [ 1510.794095] env[63241]: _type = "Task" [ 1510.794095] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.795406] env[63241]: DEBUG oslo_vmware.api [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1510.795406] env[63241]: value = "task-1820284" [ 1510.795406] env[63241]: _type = "Task" [ 1510.795406] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.811537] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820283, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.814929] env[63241]: DEBUG oslo_vmware.api [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820284, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.957681] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Releasing lock "refresh_cache-eaed706d-b3db-46ed-8c70-08f80479afa4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.957681] env[63241]: DEBUG nova.compute.manager [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Instance network_info: |[{"id": "2f1329d9-5fe9-46cc-817a-c247a1999456", "address": "fa:16:3e:54:0b:7d", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.169", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f1329d9-5f", "ovs_interfaceid": "2f1329d9-5fe9-46cc-817a-c247a1999456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1510.957681] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:0b:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2f1329d9-5fe9-46cc-817a-c247a1999456', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1510.966258] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Creating folder: Project (09cb3d8f5a8f4ea9aa7cd73dae3a721d). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1510.967763] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-522d09ef-87b7-4c1a-b2d6-73bf1bfc9b3a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.970600] env[63241]: DEBUG nova.compute.manager [req-219e6b34-65b3-4067-bfa4-6f67af449271 req-4dbbfced-c4fe-411d-991e-8e7ab2451888 service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Received event network-changed-2f1329d9-5fe9-46cc-817a-c247a1999456 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1510.970782] env[63241]: DEBUG nova.compute.manager [req-219e6b34-65b3-4067-bfa4-6f67af449271 req-4dbbfced-c4fe-411d-991e-8e7ab2451888 service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Refreshing instance network info cache due to event network-changed-2f1329d9-5fe9-46cc-817a-c247a1999456. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1510.970996] env[63241]: DEBUG oslo_concurrency.lockutils [req-219e6b34-65b3-4067-bfa4-6f67af449271 req-4dbbfced-c4fe-411d-991e-8e7ab2451888 service nova] Acquiring lock "refresh_cache-eaed706d-b3db-46ed-8c70-08f80479afa4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.971174] env[63241]: DEBUG oslo_concurrency.lockutils [req-219e6b34-65b3-4067-bfa4-6f67af449271 req-4dbbfced-c4fe-411d-991e-8e7ab2451888 service nova] Acquired lock "refresh_cache-eaed706d-b3db-46ed-8c70-08f80479afa4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.971315] env[63241]: DEBUG nova.network.neutron [req-219e6b34-65b3-4067-bfa4-6f67af449271 req-4dbbfced-c4fe-411d-991e-8e7ab2451888 service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Refreshing network info cache for port 2f1329d9-5fe9-46cc-817a-c247a1999456 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1510.988097] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Created folder: Project (09cb3d8f5a8f4ea9aa7cd73dae3a721d) in parent group-v376927. [ 1510.988307] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Creating folder: Instances. Parent ref: group-v377059. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1510.989026] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1737fcf-af16-4d0b-9afa-c79dc88a8643 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.001691] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Created folder: Instances in parent group-v377059. [ 1511.001951] env[63241]: DEBUG oslo.service.loopingcall [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1511.002165] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1511.002380] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d7125eb-cf79-41e1-acd9-f1473fcff130 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.027175] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1511.027175] env[63241]: value = "task-1820287" [ 1511.027175] env[63241]: _type = "Task" [ 1511.027175] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.039393] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820287, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.311893] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820283, 'name': Rename_Task, 'duration_secs': 0.149703} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.314933] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1511.314933] env[63241]: DEBUG oslo_vmware.api [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820284, 'name': ReconfigVM_Task, 'duration_secs': 0.349872} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.317526] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eaf4f082-2cff-48f8-8f5e-6dde4025b98a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.319168] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Reconfigured VM instance instance-0000002d to attach disk [datastore1] volume-037b9a76-8815-4762-9780-c0a21b05b3bd/volume-037b9a76-8815-4762-9780-c0a21b05b3bd.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1511.324262] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ce84f48-aa3c-44fa-b4fd-2acb108d8a7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.344264] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1511.344264] env[63241]: value = "task-1820288" [ 1511.344264] env[63241]: _type = "Task" [ 1511.344264] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.345746] env[63241]: DEBUG oslo_vmware.api [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1511.345746] env[63241]: value = "task-1820289" [ 1511.345746] env[63241]: _type = "Task" [ 1511.345746] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.362379] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820288, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.366432] env[63241]: DEBUG oslo_vmware.api [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820289, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.479676] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9c5505-bae7-437c-b587-b18b781e8030 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.489320] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68cbbd0-4fa2-483f-99e0-52680e053f26 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.540965] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37df407a-6b2e-4070-a8e2-2fc196854e69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.553759] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820287, 'name': CreateVM_Task, 'duration_secs': 0.495588} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.555367] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1511.555582] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.555877] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.556207] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1511.557553] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bebe40d-d4bb-40b5-8d2d-738ce2ac3f2b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.561430] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fbc51d3-75b8-45da-87b5-debd062bfafd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.574317] env[63241]: DEBUG nova.compute.provider_tree [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1511.577997] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1511.577997] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ae72ca-0c63-f13a-724c-61c2ef1c44b4" [ 1511.577997] env[63241]: _type = "Task" [ 1511.577997] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.589164] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ae72ca-0c63-f13a-724c-61c2ef1c44b4, 'name': SearchDatastore_Task, 'duration_secs': 0.011599} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.589473] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.589708] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1511.590123] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.590123] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.590301] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1511.590567] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-986dbee1-5271-4bf3-8666-c8535436cf07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.600821] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1511.601013] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1511.601781] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5545dfa-5b64-43eb-8b6d-a9ee2d89176e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.607450] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1511.607450] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d16385-8f5a-1a60-e655-9a7b007202bf" [ 1511.607450] env[63241]: _type = "Task" [ 1511.607450] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.615606] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d16385-8f5a-1a60-e655-9a7b007202bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.859845] env[63241]: DEBUG oslo_vmware.api [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820288, 'name': PowerOnVM_Task, 'duration_secs': 0.433723} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.865418] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1511.865418] env[63241]: INFO nova.compute.manager [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Took 9.06 seconds to spawn the instance on the hypervisor. [ 1511.865418] env[63241]: DEBUG nova.compute.manager [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1511.865418] env[63241]: DEBUG oslo_vmware.api [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820289, 'name': ReconfigVM_Task, 'duration_secs': 0.166054} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.865418] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e45758-513b-49ea-b6f7-e012996fc01c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.867522] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377058', 'volume_id': '037b9a76-8815-4762-9780-c0a21b05b3bd', 'name': 'volume-037b9a76-8815-4762-9780-c0a21b05b3bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2d1425f2-ddf9-4e82-bcfe-e11c597d011a', 'attached_at': '', 'detached_at': '', 'volume_id': '037b9a76-8815-4762-9780-c0a21b05b3bd', 'serial': '037b9a76-8815-4762-9780-c0a21b05b3bd'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1511.972836] env[63241]: DEBUG nova.network.neutron [req-219e6b34-65b3-4067-bfa4-6f67af449271 req-4dbbfced-c4fe-411d-991e-8e7ab2451888 service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Updated VIF entry in instance network info cache for port 2f1329d9-5fe9-46cc-817a-c247a1999456. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1511.972836] env[63241]: DEBUG nova.network.neutron [req-219e6b34-65b3-4067-bfa4-6f67af449271 req-4dbbfced-c4fe-411d-991e-8e7ab2451888 service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Updating instance_info_cache with network_info: [{"id": "2f1329d9-5fe9-46cc-817a-c247a1999456", "address": "fa:16:3e:54:0b:7d", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.169", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f1329d9-5f", "ovs_interfaceid": "2f1329d9-5fe9-46cc-817a-c247a1999456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.080043] env[63241]: DEBUG nova.scheduler.client.report [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1512.124049] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d16385-8f5a-1a60-e655-9a7b007202bf, 'name': SearchDatastore_Task, 'duration_secs': 0.01057} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.126193] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04d9c334-cb16-486e-b720-134cec106314 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.135022] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1512.135022] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521c640c-b268-d1d3-5b45-4656dafd2c67" [ 1512.135022] env[63241]: _type = "Task" [ 1512.135022] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.143920] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521c640c-b268-d1d3-5b45-4656dafd2c67, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.299975] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquiring lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.299975] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.299975] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquiring lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.299975] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.299975] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.301958] env[63241]: INFO nova.compute.manager [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Terminating instance [ 1512.303768] env[63241]: DEBUG nova.compute.manager [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1512.303859] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1512.304676] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a032f65c-a680-4749-b7a9-16598f3ee7fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.313085] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1512.313331] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4489d515-6457-4964-a6b7-113a70f9e571 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.337084] env[63241]: DEBUG oslo_vmware.api [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1512.337084] env[63241]: value = "task-1820290" [ 1512.337084] env[63241]: _type = "Task" [ 1512.337084] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.346715] env[63241]: DEBUG oslo_vmware.api [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1820290, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.390895] env[63241]: DEBUG oslo_concurrency.lockutils [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] Acquiring lock "refresh_cache-e753da08-d4a5-4f17-85c8-154e843798c9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.391217] env[63241]: DEBUG oslo_concurrency.lockutils [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] Acquired lock "refresh_cache-e753da08-d4a5-4f17-85c8-154e843798c9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.391544] env[63241]: DEBUG nova.network.neutron [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.396983] env[63241]: INFO nova.compute.manager [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Took 42.20 seconds to build instance. [ 1512.475427] env[63241]: DEBUG oslo_concurrency.lockutils [req-219e6b34-65b3-4067-bfa4-6f67af449271 req-4dbbfced-c4fe-411d-991e-8e7ab2451888 service nova] Releasing lock "refresh_cache-eaed706d-b3db-46ed-8c70-08f80479afa4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.589094] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.589706] env[63241]: DEBUG nova.compute.manager [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1512.595597] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.157s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.595597] env[63241]: INFO nova.compute.claims [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1512.644384] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521c640c-b268-d1d3-5b45-4656dafd2c67, 'name': SearchDatastore_Task, 'duration_secs': 0.010074} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.644672] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.644929] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] eaed706d-b3db-46ed-8c70-08f80479afa4/eaed706d-b3db-46ed-8c70-08f80479afa4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1512.645203] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c925f0d4-e743-400a-9fb9-7fbb6500eb2c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.653883] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1512.653883] env[63241]: value = "task-1820291" [ 1512.653883] env[63241]: _type = "Task" [ 1512.653883] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.662764] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820291, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.848709] env[63241]: DEBUG oslo_vmware.api [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1820290, 'name': PowerOffVM_Task, 'duration_secs': 0.308294} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.848984] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1512.849199] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1512.849474] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efbcad79-6750-46f6-b014-5775781ce21f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.901355] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff2b2db0-5ea4-4d77-87a4-6d563b613bf6 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "e753da08-d4a5-4f17-85c8-154e843798c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.622s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.924965] env[63241]: DEBUG nova.objects.instance [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'flavor' on Instance uuid 2d1425f2-ddf9-4e82-bcfe-e11c597d011a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1512.998758] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1512.999344] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1512.999344] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Deleting the datastore file [datastore1] 5fce9350-6d45-4bfb-a74b-f5b384ecb16c {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1512.999518] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9735631-e69b-4bf2-bcdc-504e4e2e0c23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.009407] env[63241]: DEBUG oslo_vmware.api [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for the task: (returnval){ [ 1513.009407] env[63241]: value = "task-1820293" [ 1513.009407] env[63241]: _type = "Task" [ 1513.009407] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.021281] env[63241]: DEBUG oslo_vmware.api [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1820293, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.094872] env[63241]: DEBUG nova.compute.utils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1513.096482] env[63241]: DEBUG nova.compute.manager [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1513.096674] env[63241]: DEBUG nova.network.neutron [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1513.150866] env[63241]: DEBUG nova.policy [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37331fb003154d989da99dae6dde9078', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0350a83a316341dabe7c8665737a3888', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1513.169433] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504415} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.169433] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] eaed706d-b3db-46ed-8c70-08f80479afa4/eaed706d-b3db-46ed-8c70-08f80479afa4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1513.169433] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1513.169433] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa28ddf6-d67e-4f7a-9faf-76ff6946dd95 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.176346] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1513.176346] env[63241]: value = "task-1820294" [ 1513.176346] env[63241]: _type = "Task" [ 1513.176346] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.186782] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820294, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.331039] env[63241]: DEBUG nova.network.neutron [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Updating instance_info_cache with network_info: [{"id": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "address": "fa:16:3e:eb:0e:66", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5727d5d3-d1", "ovs_interfaceid": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.407669] env[63241]: DEBUG nova.compute.manager [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1513.432261] env[63241]: DEBUG oslo_concurrency.lockutils [None req-854bcd7e-c3f0-461c-bbe0-0578235f3b96 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.843s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.438664] env[63241]: DEBUG nova.network.neutron [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Successfully created port: ef178c90-94df-45d1-97ca-ab5ef401691b {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1513.519654] env[63241]: DEBUG oslo_vmware.api [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Task: {'id': task-1820293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256627} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.519968] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1513.520169] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1513.520365] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1513.520541] env[63241]: INFO nova.compute.manager [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1513.520781] env[63241]: DEBUG oslo.service.loopingcall [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1513.521335] env[63241]: DEBUG nova.compute.manager [-] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1513.521441] env[63241]: DEBUG nova.network.neutron [-] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1513.600256] env[63241]: DEBUG nova.compute.manager [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1513.688123] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820294, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088228} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.692744] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1513.698734] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1692446d-c239-4706-836f-286308221608 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.724136] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] eaed706d-b3db-46ed-8c70-08f80479afa4/eaed706d-b3db-46ed-8c70-08f80479afa4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1513.728388] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-270de86d-a61c-40fe-a199-2e7ce52f2192 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.752895] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1513.752895] env[63241]: value = "task-1820295" [ 1513.752895] env[63241]: _type = "Task" [ 1513.752895] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.768479] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820295, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.837018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] Releasing lock "refresh_cache-e753da08-d4a5-4f17-85c8-154e843798c9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1513.837018] env[63241]: DEBUG nova.compute.manager [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Inject network info {{(pid=63241) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1513.837018] env[63241]: DEBUG nova.compute.manager [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] network_info to inject: |[{"id": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "address": "fa:16:3e:eb:0e:66", "network": {"id": "b337daaf-30b2-4654-86b7-d5c28b4a88f2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1218980603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38c709b68d2a40049d6d4795267987d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5727d5d3-d1", "ovs_interfaceid": "5727d5d3-d1ae-4830-a899-52c5d7ea9414", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1513.841120] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Reconfiguring VM instance to set the machine id {{(pid=63241) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1513.844641] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6df9c8b-ce8a-46d5-a257-604df3ee5e89 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.867022] env[63241]: DEBUG oslo_vmware.api [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] Waiting for the task: (returnval){ [ 1513.867022] env[63241]: value = "task-1820296" [ 1513.867022] env[63241]: _type = "Task" [ 1513.867022] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.875451] env[63241]: DEBUG oslo_vmware.api [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] Task: {'id': task-1820296, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.924319] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.202908] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.202908] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.202908] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.203334] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.203334] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.205142] env[63241]: INFO nova.compute.manager [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Terminating instance [ 1514.207156] env[63241]: DEBUG nova.compute.manager [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 
tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1514.207344] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1514.207577] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d85c3f12-3f66-4edd-b8fb-bed102ab6ac1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.216228] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215c998a-15e7-422d-b821-a7a80cabd8f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.220728] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1514.220728] env[63241]: value = "task-1820297" [ 1514.220728] env[63241]: _type = "Task" [ 1514.220728] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.228580] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e090fafb-7d93-42b4-ae8f-3c8689517e65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.234929] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.268092] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e36a976-9bd6-4606-94bd-bb4eea2455ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.279179] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820295, 'name': ReconfigVM_Task, 'duration_secs': 0.312243} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.281916] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Reconfigured VM instance instance-0000002f to attach disk [datastore1] eaed706d-b3db-46ed-8c70-08f80479afa4/eaed706d-b3db-46ed-8c70-08f80479afa4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1514.283022] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b65fb572-4f8a-4144-bf04-57a65daf1e4c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.286054] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28fe01c-4951-4500-aeb1-bea1d1fc0d94 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.302681] env[63241]: DEBUG nova.compute.provider_tree [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1514.309975] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1514.309975] env[63241]: value = "task-1820298" [ 1514.309975] env[63241]: _type = "Task" [ 1514.309975] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.321200] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820298, 'name': Rename_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.376552] env[63241]: DEBUG oslo_vmware.api [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] Task: {'id': task-1820296, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.613650] env[63241]: DEBUG nova.compute.manager [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1514.651597] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1514.652103] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1514.652389] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1514.652685] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1514.653076] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1514.653354] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1514.653856] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1514.654152] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1514.654634] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1514.654947] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1514.655261] env[63241]: DEBUG nova.virt.hardware [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1514.656611] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27533dd2-8a54-4f3b-af26-f94aef71a1c7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.667123] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccde1319-cb5a-4929-87eb-d818dad33f09 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.734338] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820297, 'name': PowerOffVM_Task, 'duration_secs': 0.364504} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.734936] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1514.735250] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1514.735775] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377058', 'volume_id': '037b9a76-8815-4762-9780-c0a21b05b3bd', 'name': 'volume-037b9a76-8815-4762-9780-c0a21b05b3bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2d1425f2-ddf9-4e82-bcfe-e11c597d011a', 'attached_at': '', 'detached_at': '', 'volume_id': '037b9a76-8815-4762-9780-c0a21b05b3bd', 'serial': '037b9a76-8815-4762-9780-c0a21b05b3bd'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1514.736907] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52382574-54c8-4aef-9e08-80534a2bae03 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.770189] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dcd11a4-e763-497f-82ad-ac0d293f9317 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.779334] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297bb541-ab60-4013-bc4f-e877ba4dfecc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.807747] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ef3560-078e-44d0-96ca-799eafacf231 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.812177] env[63241]: DEBUG nova.scheduler.client.report [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1514.828296] env[63241]: DEBUG nova.compute.manager [req-a887419d-961a-4f3f-8dd8-4e181e5587ad req-6f269da5-d6c9-459a-9ac7-d235b4403141 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Received event network-vif-deleted-03370c0c-303d-4511-8cd8-44be5bad305a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1514.828571] env[63241]: INFO nova.compute.manager [req-a887419d-961a-4f3f-8dd8-4e181e5587ad req-6f269da5-d6c9-459a-9ac7-d235b4403141 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Neutron deleted interface 03370c0c-303d-4511-8cd8-44be5bad305a; detaching it from the instance and deleting it from the info cache [ 1514.828973] env[63241]: DEBUG nova.network.neutron 
[req-a887419d-961a-4f3f-8dd8-4e181e5587ad req-6f269da5-d6c9-459a-9ac7-d235b4403141 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.842255] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] The volume has not been displaced from its original location: [datastore1] volume-037b9a76-8815-4762-9780-c0a21b05b3bd/volume-037b9a76-8815-4762-9780-c0a21b05b3bd.vmdk. No consolidation needed. {{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1514.848046] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Reconfiguring VM instance instance-0000002d to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1514.853937] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a74b5bc3-4eac-4015-a1f8-7ffc3ac21745 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.870311] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820298, 'name': Rename_Task, 'duration_secs': 0.180533} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.871518] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1514.875012] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1063426-c3bd-4347-8c25-eb318ff2c872 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.884423] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1514.884423] env[63241]: value = "task-1820299" [ 1514.884423] env[63241]: _type = "Task" [ 1514.884423] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.888851] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1514.888851] env[63241]: value = "task-1820300" [ 1514.888851] env[63241]: _type = "Task" [ 1514.888851] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.888851] env[63241]: DEBUG oslo_vmware.api [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] Task: {'id': task-1820296, 'name': ReconfigVM_Task, 'duration_secs': 0.638853} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.892431] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-746e5e24-58ce-481d-bfa6-573965157cd1 tempest-ServersAdminTestJSON-1069499579 tempest-ServersAdminTestJSON-1069499579-project-admin] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Reconfigured VM instance to set the machine id {{(pid=63241) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1514.901336] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820299, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.904733] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820300, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.028311] env[63241]: DEBUG nova.network.neutron [-] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.327274] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.733s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.327274] env[63241]: DEBUG nova.compute.manager [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1515.331024] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.135s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.332396] env[63241]: INFO nova.compute.claims [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1515.356039] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adf6a46e-3cf4-482e-9a5f-cf4364f2e07d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.369877] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47387bc8-cde1-4eaa-b8d6-66b9051c5420 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.412328] env[63241]: DEBUG nova.compute.manager [req-a887419d-961a-4f3f-8dd8-4e181e5587ad req-6f269da5-d6c9-459a-9ac7-d235b4403141 service nova] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Detach interface failed, port_id=03370c0c-303d-4511-8cd8-44be5bad305a, reason: Instance 5fce9350-6d45-4bfb-a74b-f5b384ecb16c could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1515.425904] env[63241]: DEBUG oslo_vmware.api [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820300, 'name': PowerOnVM_Task, 'duration_secs': 0.496121} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.426228] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820299, 'name': ReconfigVM_Task, 'duration_secs': 0.328038} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.426741] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1515.426947] env[63241]: INFO nova.compute.manager [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Took 7.73 seconds to spawn the instance on the hypervisor. 
[ 1515.430021] env[63241]: DEBUG nova.compute.manager [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1515.430021] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Reconfigured VM instance instance-0000002d to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1515.434184] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a91af0-b036-4cfd-8ae7-1f17685efa57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.437270] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49c5a7f2-8897-4df9-b755-0568673f7b25 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.458146] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1515.458146] env[63241]: value = "task-1820301" [ 1515.458146] env[63241]: _type = "Task" [ 1515.458146] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.468955] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820301, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.534820] env[63241]: INFO nova.compute.manager [-] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Took 2.01 seconds to deallocate network for instance. 
[ 1515.553186] env[63241]: DEBUG nova.compute.manager [req-52683051-5e13-43d5-b9c7-4bbe2a3acd82 req-7147ce30-267e-43b1-a188-7f0ca601db1a service nova] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Received event network-vif-plugged-ef178c90-94df-45d1-97ca-ab5ef401691b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1515.553403] env[63241]: DEBUG oslo_concurrency.lockutils [req-52683051-5e13-43d5-b9c7-4bbe2a3acd82 req-7147ce30-267e-43b1-a188-7f0ca601db1a service nova] Acquiring lock "3dfeaf57-2244-418e-a04a-ed4143e454d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.553617] env[63241]: DEBUG oslo_concurrency.lockutils [req-52683051-5e13-43d5-b9c7-4bbe2a3acd82 req-7147ce30-267e-43b1-a188-7f0ca601db1a service nova] Lock "3dfeaf57-2244-418e-a04a-ed4143e454d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.553786] env[63241]: DEBUG oslo_concurrency.lockutils [req-52683051-5e13-43d5-b9c7-4bbe2a3acd82 req-7147ce30-267e-43b1-a188-7f0ca601db1a service nova] Lock "3dfeaf57-2244-418e-a04a-ed4143e454d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.553955] env[63241]: DEBUG nova.compute.manager [req-52683051-5e13-43d5-b9c7-4bbe2a3acd82 req-7147ce30-267e-43b1-a188-7f0ca601db1a service nova] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] No waiting events found dispatching network-vif-plugged-ef178c90-94df-45d1-97ca-ab5ef401691b {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1515.554136] env[63241]: WARNING nova.compute.manager [req-52683051-5e13-43d5-b9c7-4bbe2a3acd82 req-7147ce30-267e-43b1-a188-7f0ca601db1a service nova] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Received unexpected event network-vif-plugged-ef178c90-94df-45d1-97ca-ab5ef401691b for instance with vm_state building and task_state spawning. [ 1515.608800] env[63241]: DEBUG nova.network.neutron [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Successfully updated port: ef178c90-94df-45d1-97ca-ab5ef401691b {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1515.837638] env[63241]: DEBUG nova.compute.utils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1515.843489] env[63241]: DEBUG nova.compute.manager [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1515.843489] env[63241]: DEBUG nova.network.neutron [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1515.906126] env[63241]: DEBUG nova.policy [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa971675dc4440df813844c1ed2f2444', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e6e05a4fd294679b512d6a4dcfebd3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1515.928022] env[63241]: INFO nova.compute.manager [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Rebuilding instance [ 1515.976663] env[63241]: INFO nova.compute.manager [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Took 43.50 seconds to build instance. [ 1515.986936] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820301, 'name': ReconfigVM_Task, 'duration_secs': 0.41632} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.986936] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377058', 'volume_id': '037b9a76-8815-4762-9780-c0a21b05b3bd', 'name': 'volume-037b9a76-8815-4762-9780-c0a21b05b3bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2d1425f2-ddf9-4e82-bcfe-e11c597d011a', 'attached_at': '', 'detached_at': '', 'volume_id': '037b9a76-8815-4762-9780-c0a21b05b3bd', 'serial': '037b9a76-8815-4762-9780-c0a21b05b3bd'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1515.986936] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1515.986936] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157bf7f3-b063-46e4-bb43-ec5ce1096c74 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.993823] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1515.994329] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29084b26-e078-4762-acda-caf2a7feaa76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.000249] env[63241]: DEBUG nova.compute.manager [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1516.001061] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40be8e29-9e71-46a7-be4e-b86255491e22 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.044600] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.112247] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquiring lock "refresh_cache-3dfeaf57-2244-418e-a04a-ed4143e454d5" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.112427] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquired lock "refresh_cache-3dfeaf57-2244-418e-a04a-ed4143e454d5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.112621] env[63241]: DEBUG nova.network.neutron [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1516.148316] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1516.148601] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1516.149469] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleting the datastore file [datastore1] 2d1425f2-ddf9-4e82-bcfe-e11c597d011a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1516.149469] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c5f492e-507c-4b44-a8bf-5588e9a19f38 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.159689] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1516.159689] env[63241]: value = "task-1820303" [ 1516.159689] env[63241]: _type = "Task" [ 1516.159689] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.169424] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820303, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.330364] env[63241]: DEBUG nova.network.neutron [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Successfully created port: 0457ca89-42e2-485c-a958-773620259283 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1516.347151] env[63241]: DEBUG nova.compute.manager [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1516.480682] env[63241]: DEBUG oslo_concurrency.lockutils [None req-588c793d-f629-4764-9bcd-ef87361c1fbf tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lock "eaed706d-b3db-46ed-8c70-08f80479afa4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.424s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.513553] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1516.515888] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0db9ee5f-c0a1-4bdf-b7c2-ba3855142343 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.527058] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1516.527058] env[63241]: value = "task-1820304" [ 1516.527058] env[63241]: _type = "Task" [ 1516.527058] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.542985] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820304, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.660136] env[63241]: DEBUG nova.network.neutron [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1516.676230] env[63241]: DEBUG oslo_vmware.api [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820303, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288248} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.676230] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1516.676230] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1516.676230] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1516.676230] env[63241]: INFO nova.compute.manager [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Took 2.47 seconds to destroy the instance on the hypervisor. [ 1516.676512] env[63241]: DEBUG oslo.service.loopingcall [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1516.676548] env[63241]: DEBUG nova.compute.manager [-] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1516.676635] env[63241]: DEBUG nova.network.neutron [-] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1516.918814] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0a0347-503a-4e1b-88fa-8c52a6b680e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.928514] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c2ff3d-7bf9-4f81-bfdb-3b8a559f9fa7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.971134] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1601a8-e535-48f5-a3cb-be9b1654d668 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.980126] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ab13e0-4ef5-48b0-9c4f-68519653076f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.994458] env[63241]: DEBUG nova.compute.provider_tree [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1517.036930] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820304, 'name': PowerOffVM_Task, 'duration_secs': 0.201375} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.037312] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1517.037557] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1517.038341] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712b38da-beda-4558-b979-fd01cf5ddd24 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.045884] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1517.046107] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56818f2e-76a5-4a2b-baaa-703340e23de7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.084489] env[63241]: DEBUG nova.network.neutron [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Updating instance_info_cache with network_info: [{"id": "ef178c90-94df-45d1-97ca-ab5ef401691b", "address": "fa:16:3e:35:f1:90", "network": {"id": "1a9aefb8-cc3e-49da-84d5-cfcac63e18d6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1414473039-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0350a83a316341dabe7c8665737a3888", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef178c90-94", "ovs_interfaceid": "ef178c90-94df-45d1-97ca-ab5ef401691b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.150230] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Unregistered the 
VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1517.150475] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1517.150658] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleting the datastore file [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1517.150935] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f378755b-ca10-4b8c-bc22-d4632da77011 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.158159] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1517.158159] env[63241]: value = "task-1820306" [ 1517.158159] env[63241]: _type = "Task" [ 1517.158159] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.167255] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820306, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.364352] env[63241]: DEBUG nova.compute.manager [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1517.394935] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1517.395184] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1517.395347] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1517.395532] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1517.395732] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1517.395824] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1517.396056] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1517.396268] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1517.396452] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 
tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1517.396622] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1517.396793] env[63241]: DEBUG nova.virt.hardware [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1517.397690] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9aaeff6-fa84-45f7-991b-16dfed4721d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.409855] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d7b96a-786e-40b0-acd1-997ffe986ac1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.488386] env[63241]: DEBUG nova.compute.manager [req-a06f8ced-a63e-43da-94f1-0a68f60922d8 req-9fb64bf9-40e5-402b-924b-99f4654c05df service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Received event network-vif-deleted-94476e06-60a2-4a38-8724-4dadaf22dfa0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1517.488553] env[63241]: INFO nova.compute.manager [req-a06f8ced-a63e-43da-94f1-0a68f60922d8 req-9fb64bf9-40e5-402b-924b-99f4654c05df service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Neutron deleted interface 94476e06-60a2-4a38-8724-4dadaf22dfa0; detaching it from the instance and deleting it from the info cache [ 1517.488728] env[63241]: DEBUG nova.network.neutron [req-a06f8ced-a63e-43da-94f1-0a68f60922d8 req-9fb64bf9-40e5-402b-924b-99f4654c05df service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.500342] env[63241]: DEBUG nova.scheduler.client.report [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1517.587300] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Releasing lock "refresh_cache-3dfeaf57-2244-418e-a04a-ed4143e454d5" 
{{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.587684] env[63241]: DEBUG nova.compute.manager [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Instance network_info: |[{"id": "ef178c90-94df-45d1-97ca-ab5ef401691b", "address": "fa:16:3e:35:f1:90", "network": {"id": "1a9aefb8-cc3e-49da-84d5-cfcac63e18d6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1414473039-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0350a83a316341dabe7c8665737a3888", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef178c90-94", "ovs_interfaceid": "ef178c90-94df-45d1-97ca-ab5ef401691b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1517.588127] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:f1:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee617cec-01ea-4a11-ac04-ef9767f4c86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef178c90-94df-45d1-97ca-ab5ef401691b', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1517.596958] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Creating folder: Project (0350a83a316341dabe7c8665737a3888). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1517.597332] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1eb097e1-9304-4699-be4f-082ee8cf8bd8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.610217] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Created folder: Project (0350a83a316341dabe7c8665737a3888) in parent group-v376927. [ 1517.610439] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Creating folder: Instances. 
Parent ref: group-v377062. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1517.610690] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9604023d-c456-4733-893a-d9a42c3e210d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.622482] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Created folder: Instances in parent group-v377062. [ 1517.622791] env[63241]: DEBUG oslo.service.loopingcall [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.622937] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1517.623167] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79a58402-d891-4df5-9ba0-c6017a4861d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.644774] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1517.644774] env[63241]: value = "task-1820309" [ 1517.644774] env[63241]: _type = "Task" [ 1517.644774] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.653803] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820309, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.669807] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820306, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266535} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.670120] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1517.670359] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1517.670545] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1517.715086] env[63241]: DEBUG nova.compute.manager [req-a56ba4fc-4e35-4194-98a2-058ac6898179 req-cd34d81d-7a8c-4551-8cb0-b03e699eb8fb service nova] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Received event network-changed-ef178c90-94df-45d1-97ca-ab5ef401691b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1517.715290] env[63241]: DEBUG nova.compute.manager [req-a56ba4fc-4e35-4194-98a2-058ac6898179 req-cd34d81d-7a8c-4551-8cb0-b03e699eb8fb service nova] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Refreshing instance network info cache due to event network-changed-ef178c90-94df-45d1-97ca-ab5ef401691b. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1517.715501] env[63241]: DEBUG oslo_concurrency.lockutils [req-a56ba4fc-4e35-4194-98a2-058ac6898179 req-cd34d81d-7a8c-4551-8cb0-b03e699eb8fb service nova] Acquiring lock "refresh_cache-3dfeaf57-2244-418e-a04a-ed4143e454d5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.715640] env[63241]: DEBUG oslo_concurrency.lockutils [req-a56ba4fc-4e35-4194-98a2-058ac6898179 req-cd34d81d-7a8c-4551-8cb0-b03e699eb8fb service nova] Acquired lock "refresh_cache-3dfeaf57-2244-418e-a04a-ed4143e454d5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.715798] env[63241]: DEBUG nova.network.neutron [req-a56ba4fc-4e35-4194-98a2-058ac6898179 req-cd34d81d-7a8c-4551-8cb0-b03e699eb8fb service nova] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Refreshing network info cache for port ef178c90-94df-45d1-97ca-ab5ef401691b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1517.730602] env[63241]: DEBUG nova.network.neutron [-] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.940863] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquiring lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.940863] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.956526] env[63241]: DEBUG nova.network.neutron [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Successfully updated port: 0457ca89-42e2-485c-a958-773620259283 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1517.992556] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22c70798-5bf9-4c5e-a450-e4a88eb77a44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.005149] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b2fa89-7556-49a3-b259-26ca85531645 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.022334] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.022860] env[63241]: DEBUG nova.compute.manager [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1518.026313] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.691s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.026597] env[63241]: DEBUG nova.objects.instance [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lazy-loading 'resources' on Instance uuid c1c85cc0-53f1-4920-8f3e-6dd69414fa85 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1518.055526] env[63241]: DEBUG nova.compute.manager [req-a06f8ced-a63e-43da-94f1-0a68f60922d8 req-9fb64bf9-40e5-402b-924b-99f4654c05df service nova] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Detach interface failed, port_id=94476e06-60a2-4a38-8724-4dadaf22dfa0, reason: Instance 2d1425f2-ddf9-4e82-bcfe-e11c597d011a could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1518.155643] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820309, 'name': CreateVM_Task, 'duration_secs': 0.481913} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.155904] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1518.156644] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.156906] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.157257] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1518.157514] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a8de31f-82cc-4056-b612-a42edfe65bcf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.162808] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1518.162808] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52193850-6b49-17dd-5bcd-4a8826237e2c" [ 1518.162808] env[63241]: _type = "Task" [ 1518.162808] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.172117] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52193850-6b49-17dd-5bcd-4a8826237e2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.234956] env[63241]: INFO nova.compute.manager [-] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Took 1.56 seconds to deallocate network for instance. [ 1518.444732] env[63241]: DEBUG nova.compute.manager [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1518.453038] env[63241]: DEBUG nova.network.neutron [req-a56ba4fc-4e35-4194-98a2-058ac6898179 req-cd34d81d-7a8c-4551-8cb0-b03e699eb8fb service nova] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Updated VIF entry in instance network info cache for port ef178c90-94df-45d1-97ca-ab5ef401691b. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1518.453434] env[63241]: DEBUG nova.network.neutron [req-a56ba4fc-4e35-4194-98a2-058ac6898179 req-cd34d81d-7a8c-4551-8cb0-b03e699eb8fb service nova] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Updating instance_info_cache with network_info: [{"id": "ef178c90-94df-45d1-97ca-ab5ef401691b", "address": "fa:16:3e:35:f1:90", "network": {"id": "1a9aefb8-cc3e-49da-84d5-cfcac63e18d6", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1414473039-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0350a83a316341dabe7c8665737a3888", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee617cec-01ea-4a11-ac04-ef9767f4c86d", "external-id": "nsx-vlan-transportzone-11", "segmentation_id": 11, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef178c90-94", "ovs_interfaceid": "ef178c90-94df-45d1-97ca-ab5ef401691b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.458725] env[63241]: DEBUG oslo_concurrency.lockutils [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquiring lock "eaed706d-b3db-46ed-8c70-08f80479afa4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.458987] env[63241]: DEBUG oslo_concurrency.lockutils [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lock "eaed706d-b3db-46ed-8c70-08f80479afa4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.459625] env[63241]: DEBUG oslo_concurrency.lockutils [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquiring lock "eaed706d-b3db-46ed-8c70-08f80479afa4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.459625] env[63241]: DEBUG oslo_concurrency.lockutils [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 
tempest-TenantUsagesTestJSON-839879260-project-member] Lock "eaed706d-b3db-46ed-8c70-08f80479afa4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.459625] env[63241]: DEBUG oslo_concurrency.lockutils [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lock "eaed706d-b3db-46ed-8c70-08f80479afa4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.463716] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.463716] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.463716] env[63241]: DEBUG nova.network.neutron [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1518.464185] env[63241]: INFO nova.compute.manager [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Terminating instance [ 1518.466191] env[63241]: DEBUG nova.compute.manager [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1518.466390] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1518.467250] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89eaeef8-7372-463c-85e5-ea92f0817244 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.476336] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1518.476780] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffbab88f-15c9-4a85-9312-edb1a344b282 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.484539] env[63241]: DEBUG oslo_vmware.api [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1518.484539] env[63241]: value = "task-1820310" [ 1518.484539] env[63241]: _type = "Task" [ 1518.484539] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.494265] env[63241]: DEBUG oslo_vmware.api [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820310, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.534355] env[63241]: DEBUG nova.compute.utils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1518.535869] env[63241]: DEBUG nova.compute.manager [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1518.536458] env[63241]: DEBUG nova.network.neutron [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1518.580028] env[63241]: DEBUG nova.policy [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3918943329014fa9b2de646fed7d2714', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fedeb3768ebc4b96bd5a85bfb0a03cf8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1518.676673] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52193850-6b49-17dd-5bcd-4a8826237e2c, 'name': SearchDatastore_Task, 'duration_secs': 0.012579} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.679079] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.679494] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1518.679614] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.679767] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.679947] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1518.684765] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c4c73aa-4e0d-4b46-8375-f7a1bed63fe4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.708360] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1518.708684] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1518.709596] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2e532e3-86c1-47e2-a148-3164306f9ac1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.716377] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1518.716619] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1518.716777] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1518.716962] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1518.717291] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1518.717466] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1518.717773] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1518.717972] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1518.718175] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1518.718343] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1518.718516] env[63241]: DEBUG nova.virt.hardware [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1518.721720] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c070afe-135e-4329-8774-416b2d6d3236 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.728208] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1518.728208] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525cde47-7c7d-c84c-33ad-8ac0901fa67d" [ 1518.728208] env[63241]: _type = "Task" [ 1518.728208] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.736642] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2929abd-b6a4-4114-993d-6cd2e7784c24 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.750754] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525cde47-7c7d-c84c-33ad-8ac0901fa67d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.758848] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:50:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '329d0e4b-4190-484a-8560-9356dc31beca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc8209bb-1cd1-4efc-806c-3fb04ffc73c5', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1518.767275] env[63241]: DEBUG oslo.service.loopingcall [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1518.770143] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1518.770270] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57433351-b406-4218-a711-ce27f57631a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.787157] env[63241]: INFO nova.compute.manager [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Took 0.55 seconds to detach 1 volumes for instance. [ 1518.799417] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1518.799417] env[63241]: value = "task-1820311" [ 1518.799417] env[63241]: _type = "Task" [ 1518.799417] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.808875] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820311, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.900974] env[63241]: DEBUG nova.network.neutron [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Successfully created port: dcdf6593-f699-4bf0-8fa5-16a49caabae8 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1518.956771] env[63241]: DEBUG oslo_concurrency.lockutils [req-a56ba4fc-4e35-4194-98a2-058ac6898179 req-cd34d81d-7a8c-4551-8cb0-b03e699eb8fb service nova] Releasing lock "refresh_cache-3dfeaf57-2244-418e-a04a-ed4143e454d5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.973442] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.997251] env[63241]: DEBUG oslo_vmware.api [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820310, 'name': PowerOffVM_Task, 'duration_secs': 0.298687} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.997251] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1518.997335] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1518.999958] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c59d23d0-01ce-47da-bac4-0fa473788b13 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.005373] env[63241]: DEBUG nova.network.neutron [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1519.039183] env[63241]: DEBUG nova.compute.utils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1519.109410] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511c0037-1079-4a29-9a67-27b0e51eb4fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.119182] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2d3b6f-6b87-44a2-af64-043ba39d2abb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.156157] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de3b0e5-b7b2-48a0-9d11-c012072439ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.166234] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fa1c48-e757-46ff-9227-f904f75f435a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.184258] env[63241]: DEBUG nova.compute.provider_tree [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1519.187873] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1519.188104] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1519.188290] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Deleting the datastore file [datastore1] eaed706d-b3db-46ed-8c70-08f80479afa4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1519.188544] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e88f4e29-0af0-476e-865f-b867f8375745 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.195943] env[63241]: DEBUG oslo_vmware.api [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for the task: (returnval){ [ 1519.195943] env[63241]: value = "task-1820313" [ 
1519.195943] env[63241]: _type = "Task" [ 1519.195943] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.208758] env[63241]: DEBUG oslo_vmware.api [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820313, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.242370] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525cde47-7c7d-c84c-33ad-8ac0901fa67d, 'name': SearchDatastore_Task, 'duration_secs': 0.364602} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.243360] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bb157ca-a24c-4d52-a90d-9512826baa13 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.249530] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1519.249530] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a1bcd7-e2bd-efae-734e-ccf07b76c60b" [ 1519.249530] env[63241]: _type = "Task" [ 1519.249530] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.258614] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a1bcd7-e2bd-efae-734e-ccf07b76c60b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.282548] env[63241]: DEBUG nova.network.neutron [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance_info_cache with network_info: [{"id": "0457ca89-42e2-485c-a958-773620259283", "address": "fa:16:3e:e2:4e:b4", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0457ca89-42", "ovs_interfaceid": "0457ca89-42e2-485c-a958-773620259283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.295253] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.312895] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820311, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.542241] env[63241]: DEBUG nova.compute.manager [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Start building block device mappings for instance. 
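[annotation] The Updating instance_info_cache record above carries the full network_info structure for port 0457ca89-42e2-485c-a958-773620259283. A short sketch that pulls the MAC, CIDR and fixed IP out of that structure; the dict below is trimmed to the fields the loop touches, with values copied from the log:

    # Trimmed copy of the network_info shown above.
    network_info = [{
        "id": "0457ca89-42e2-485c-a958-773620259283",
        "address": "fa:16:3e:e2:4e:b4",
        "network": {
            "label": "shared",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "ips": [{"address": "192.168.233.8", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            fixed = [ip["address"] for ip in subnet["ips"]
                     if ip["type"] == "fixed"]
            print(vif["address"], subnet["cidr"], fixed)
    # -> fa:16:3e:e2:4e:b4 192.168.233.0/24 ['192.168.233.8']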
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1519.687923] env[63241]: DEBUG nova.scheduler.client.report [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1519.706955] env[63241]: DEBUG oslo_vmware.api [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Task: {'id': task-1820313, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197569} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.707252] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1519.707447] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1519.707626] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1519.707804] env[63241]: INFO nova.compute.manager [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1519.708062] env[63241]: DEBUG oslo.service.loopingcall [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
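[annotation] The "Inventory has not changed for provider" record above reports the provider inventory (VCPU total=48, allocation_ratio=4.0; MEMORY_MB total=196590, reserved=512; DISK_GB total=400). As a worked example of how that data becomes schedulable capacity, the sketch below applies the usual placement formula capacity = (total - reserved) * allocation_ratio; the formula is background knowledge about placement, not something stated in this log:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400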
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.708265] env[63241]: DEBUG nova.compute.manager [-] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1519.708373] env[63241]: DEBUG nova.network.neutron [-] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1519.761482] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a1bcd7-e2bd-efae-734e-ccf07b76c60b, 'name': SearchDatastore_Task, 'duration_secs': 0.025172} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.761787] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.762097] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 3dfeaf57-2244-418e-a04a-ed4143e454d5/3dfeaf57-2244-418e-a04a-ed4143e454d5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1519.762417] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-286e0258-df31-43d8-9d28-2b9fd78b94b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.770546] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1519.770546] env[63241]: value = "task-1820314" [ 1519.770546] env[63241]: _type = "Task" [ 1519.770546] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.779803] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820314, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.785431] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.785796] env[63241]: DEBUG nova.compute.manager [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Instance network_info: |[{"id": "0457ca89-42e2-485c-a958-773620259283", "address": "fa:16:3e:e2:4e:b4", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0457ca89-42", "ovs_interfaceid": "0457ca89-42e2-485c-a958-773620259283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1519.787242] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:4e:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0457ca89-42e2-485c-a958-773620259283', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1519.796555] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Creating folder: Project (4e6e05a4fd294679b512d6a4dcfebd3f). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1519.797933] env[63241]: DEBUG nova.compute.manager [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Received event network-vif-plugged-0457ca89-42e2-485c-a958-773620259283 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.798180] env[63241]: DEBUG oslo_concurrency.lockutils [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] Acquiring lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.798401] env[63241]: DEBUG oslo_concurrency.lockutils [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.798612] env[63241]: DEBUG oslo_concurrency.lockutils [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.798889] env[63241]: DEBUG nova.compute.manager [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] No waiting events found dispatching network-vif-plugged-0457ca89-42e2-485c-a958-773620259283 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1519.799168] env[63241]: WARNING nova.compute.manager [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Received unexpected event network-vif-plugged-0457ca89-42e2-485c-a958-773620259283 for instance with vm_state building and task_state spawning. [ 1519.799364] env[63241]: DEBUG nova.compute.manager [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Received event network-changed-0457ca89-42e2-485c-a958-773620259283 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.799565] env[63241]: DEBUG nova.compute.manager [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Refreshing instance network info cache due to event network-changed-0457ca89-42e2-485c-a958-773620259283. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1519.799720] env[63241]: DEBUG oslo_concurrency.lockutils [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] Acquiring lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.799856] env[63241]: DEBUG oslo_concurrency.lockutils [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] Acquired lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.800015] env[63241]: DEBUG nova.network.neutron [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Refreshing network info cache for port 0457ca89-42e2-485c-a958-773620259283 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1519.801736] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ce6ae4c-fc34-4d4c-9ade-a4a5f5e5eeed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.819679] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820311, 'name': CreateVM_Task, 'duration_secs': 0.561481} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.822737] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1519.822737] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Created folder: Project (4e6e05a4fd294679b512d6a4dcfebd3f) in parent group-v376927. [ 1519.822737] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Creating folder: Instances. Parent ref: group-v377066. 
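[annotation] The "Creating folder: Project (...)" and "Creating folder: Instances" records correspond to two Folder.CreateFolder calls against the parent folders named in the log. A sketch of those calls through the oslo.vmware session from the first annotation; the moref values are the ones printed above, but building them by hand with get_moref is only for illustration:

    from oslo_vmware import vim_util

    # Parent VM folder reported in the log ("Parent ref: group-v376927").
    parent = vim_util.get_moref('group-v376927', 'Folder')

    project_folder = session.invoke_api(
        session.vim, 'CreateFolder', parent,
        name='Project (4e6e05a4fd294679b512d6a4dcfebd3f)')

    # Second call nests the per-project "Instances" folder (group-v377066 above).
    instances_folder = session.invoke_api(
        session.vim, 'CreateFolder', project_folder, name='Instances')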
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1519.824110] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.824370] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.824717] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1519.824980] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6f241c4-8bc7-491c-976e-5e5e4c7c2e22 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.826998] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b572b5ee-cba4-4738-a144-581441ceedc7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.835632] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1519.835632] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529e88d7-ed0f-1265-ae67-d39425911ec3" [ 1519.835632] env[63241]: _type = "Task" [ 1519.835632] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.841248] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Created folder: Instances in parent group-v377066. [ 1519.841548] env[63241]: DEBUG oslo.service.loopingcall [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
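[annotation] Both "Waiting for function ... to return" records (vm_util.create_vm here, the network deallocation helper earlier) are emitted from oslo.service's loopingcall retry machinery, which re-runs the wrapped callable when it raises one of the listed exceptions. A minimal sketch of RetryDecorator; the exception type, retry count and sleep times are illustrative:

    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=5, exceptions=(IOError,))
    def create_vm():
        # Re-invoked with increasing sleeps while it raises one of the listed
        # exceptions; the decorator logs the "Waiting for function ... to
        # return." DEBUG line while the wrapped call is outstanding.
        return 'vm-ref'

    print(create_vm())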
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.841743] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1519.841953] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a19330aa-4ae8-4d30-8a16-96be73c9e25e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.867139] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529e88d7-ed0f-1265-ae67-d39425911ec3, 'name': SearchDatastore_Task, 'duration_secs': 0.010531} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.867139] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.867139] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1519.867139] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.867139] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.867139] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1519.867139] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cae2644-4590-4c17-bb15-9c91c701f98e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.871914] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1519.871914] env[63241]: value = "task-1820317" [ 1519.871914] env[63241]: _type = "Task" [ 1519.871914] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.881724] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820317, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.883060] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1519.883280] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1519.884036] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09475c35-5dd9-426b-8658-7f3e0a833db0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.890399] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1519.890399] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a5bdf6-2d6c-7729-e04f-b52668ccbfd9" [ 1519.890399] env[63241]: _type = "Task" [ 1519.890399] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.900044] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a5bdf6-2d6c-7729-e04f-b52668ccbfd9, 'name': SearchDatastore_Task} progress is 0%. 
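[annotation] The "Creating directory with path [datastore1] devstack-image-cache_base" record here, and the earlier "Deleting the datastore file [datastore1] eaed706d-..." record, map onto two FileManager operations. A sketch of both through the same session; the Datacenter moref is a placeholder, while the datastore paths are the ones from the log:

    from oslo_vmware import vim_util

    file_mgr = session.vim.service_content.fileManager
    # Placeholder Datacenter moref; in Nova this comes from the datastore lookup.
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')

    # MakeDirectory is not a *_Task method, so there is nothing to poll.
    session.invoke_api(session.vim, 'MakeDirectory', file_mgr,
                       name='[datastore1] devstack-image-cache_base',
                       datacenter=dc_ref, createParentDirectories=True)

    # File deletion is a task and is polled like the other *_Task calls above.
    delete_task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore1] eaed706d-b3db-46ed-8c70-08f80479afa4',
        datacenter=dc_ref)
    session.wait_for_task(delete_task)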
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.198778] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.170s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.201715] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.764s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.202167] env[63241]: DEBUG nova.objects.instance [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lazy-loading 'resources' on Instance uuid 3c51d4dc-5a2c-4483-9aa5-8bab532971d4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1520.228266] env[63241]: INFO nova.scheduler.client.report [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Deleted allocations for instance c1c85cc0-53f1-4920-8f3e-6dd69414fa85 [ 1520.286977] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820314, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.385227] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820317, 'name': CreateVM_Task, 'duration_secs': 0.50742} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.385425] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1520.386114] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.386263] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.386688] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1520.387328] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2909753-8619-4136-b79c-ad2218a1ff2f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.393307] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1520.393307] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52420e9e-7be4-ff3e-64b9-97ec2efd9940" [ 1520.393307] env[63241]: _type = "Task" [ 1520.393307] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.406120] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a5bdf6-2d6c-7729-e04f-b52668ccbfd9, 'name': SearchDatastore_Task, 'duration_secs': 0.010622} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.411601] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52420e9e-7be4-ff3e-64b9-97ec2efd9940, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.411601] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-655812b0-c434-4d67-bf40-7174731bff42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.419522] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1520.419522] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bc215a-9956-0131-149d-4d86078709ee" [ 1520.419522] env[63241]: _type = "Task" [ 1520.419522] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.433082] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bc215a-9956-0131-149d-4d86078709ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.436033] env[63241]: DEBUG nova.compute.manager [req-68a607cc-0b50-477e-bfeb-cec2591410d8 req-a96f21c5-127b-4752-95f7-7e7dbca932bf service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Received event network-vif-deleted-2f1329d9-5fe9-46cc-817a-c247a1999456 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1520.436272] env[63241]: INFO nova.compute.manager [req-68a607cc-0b50-477e-bfeb-cec2591410d8 req-a96f21c5-127b-4752-95f7-7e7dbca932bf service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Neutron deleted interface 2f1329d9-5fe9-46cc-817a-c247a1999456; detaching it from the instance and deleting it from the info cache [ 1520.436491] env[63241]: DEBUG nova.network.neutron [req-68a607cc-0b50-477e-bfeb-cec2591410d8 req-a96f21c5-127b-4752-95f7-7e7dbca932bf service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.500483] env[63241]: DEBUG nova.network.neutron [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Successfully updated port: dcdf6593-f699-4bf0-8fa5-16a49caabae8 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1520.553163] env[63241]: DEBUG nova.compute.manager [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1520.585460] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:23:06Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='547591385',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1803104023',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1520.585722] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1520.585877] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1520.586266] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1520.586608] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1520.586822] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1520.587141] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1520.587352] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 
tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1520.587583] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1520.587806] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1520.588048] env[63241]: DEBUG nova.virt.hardware [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1520.588965] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe28fb3-af68-4606-8303-898f2553028e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.599329] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142431eb-9596-4a43-a12b-eaa5dd93290c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.694050] env[63241]: DEBUG nova.network.neutron [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updated VIF entry in instance network info cache for port 0457ca89-42e2-485c-a958-773620259283. 
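[annotation] The nova.virt.hardware records above walk from the flavor/image limits (65536 sockets/cores/threads, i.e. effectively unlimited) to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for a 1-vCPU flavor. A simplified, stand-alone reimplementation of that enumeration step (not Nova's actual helper) that reproduces the same result:

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Enumerate (sockets, cores, threads) triples whose product is vcpus."""
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append((s, c, t))
        return found

    # 1 vCPU with the 65536/65536/65536 limits from the log -> only 1:1:1.
    print(possible_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]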
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1520.694327] env[63241]: DEBUG nova.network.neutron [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance_info_cache with network_info: [{"id": "0457ca89-42e2-485c-a958-773620259283", "address": "fa:16:3e:e2:4e:b4", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0457ca89-42", "ovs_interfaceid": "0457ca89-42e2-485c-a958-773620259283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.739510] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3ee66ba1-a3b7-4aea-93fd-87297ee2fcd0 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "c1c85cc0-53f1-4920-8f3e-6dd69414fa85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.626s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.788420] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820314, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576442} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.788742] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 3dfeaf57-2244-418e-a04a-ed4143e454d5/3dfeaf57-2244-418e-a04a-ed4143e454d5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1520.788961] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1520.789328] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11f60ebf-0188-4c18-9d3e-47ea639081d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.799874] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1520.799874] env[63241]: value = "task-1820318" [ 1520.799874] env[63241]: _type = "Task" [ 1520.799874] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.807495] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820318, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.813528] env[63241]: DEBUG nova.network.neutron [-] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.909320] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52420e9e-7be4-ff3e-64b9-97ec2efd9940, 'name': SearchDatastore_Task, 'duration_secs': 0.015064} completed successfully. 
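[annotation] The CopyVirtualDisk_Task and ExtendVirtualDisk_Task records above copy the cached image vmdk into the instance directory and then grow the root disk to 1048576 KB. A sketch of the equivalent VirtualDiskManager calls through the same session; the Datacenter moref is a placeholder, and the exact argument set Nova passes should be treated as an assumption rather than a transcript of its code:

    from oslo_vmware import vim_util

    disk_mgr = session.vim.service_content.virtualDiskManager
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder

    image = ('[datastore1] devstack-image-cache_base/'
             'e128f8d9-813d-4846-9a6e-b4c4717cd5b4/'
             'e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk')
    root = ('[datastore1] 3dfeaf57-2244-418e-a04a-ed4143e454d5/'
            '3dfeaf57-2244-418e-a04a-ed4143e454d5.vmdk')

    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=image, sourceDatacenter=dc_ref, destName=root)
    session.wait_for_task(copy_task)

    # Matches the "Extending root virtual disk to 1048576" record (size in KB).
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=root, datacenter=dc_ref, newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)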
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.909320] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.909320] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1520.909642] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.932465] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bc215a-9956-0131-149d-4d86078709ee, 'name': SearchDatastore_Task, 'duration_secs': 0.01303} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.932465] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.932651] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1520.932815] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.932961] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1520.933212] 
env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b36e3a02-4157-462a-bc12-d7309200075a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.935097] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-176071d2-fe92-452d-877e-a9b50a19bfaa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.939371] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c57e405-a031-4bbe-a176-c7dea10c0928 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.946842] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1520.947085] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1520.947928] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1520.947928] env[63241]: value = "task-1820319" [ 1520.947928] env[63241]: _type = "Task" [ 1520.947928] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.948950] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5f3f683-1ca4-4e1d-a1aa-e93ac24e5e18 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.957311] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6eb84c-4106-4fb1-820c-1ca634636464 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.976755] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1520.976755] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5272f6d1-485c-878c-baf7-4dfbc70f6997" [ 1520.976755] env[63241]: _type = "Task" [ 1520.976755] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.982059] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820319, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.998694] env[63241]: DEBUG nova.compute.manager [req-68a607cc-0b50-477e-bfeb-cec2591410d8 req-a96f21c5-127b-4752-95f7-7e7dbca932bf service nova] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Detach interface failed, port_id=2f1329d9-5fe9-46cc-817a-c247a1999456, reason: Instance eaed706d-b3db-46ed-8c70-08f80479afa4 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1521.005654] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "refresh_cache-c7b034f7-1d7f-4782-9ecb-5987c35339cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.005654] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquired lock "refresh_cache-c7b034f7-1d7f-4782-9ecb-5987c35339cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.005866] env[63241]: DEBUG nova.network.neutron [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1521.006936] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5272f6d1-485c-878c-baf7-4dfbc70f6997, 'name': SearchDatastore_Task, 'duration_secs': 0.011492} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.009028] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d2a8328-0b9e-41ff-990b-530fb8ea852e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.015230] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1521.015230] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b2cffe-cede-3e2d-bccb-207f14606166" [ 1521.015230] env[63241]: _type = "Task" [ 1521.015230] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.027176] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b2cffe-cede-3e2d-bccb-207f14606166, 'name': SearchDatastore_Task, 'duration_secs': 0.009593} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.029959] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.030192] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce/965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1521.030635] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef250afc-04ac-4e27-8b1f-dd52e182d379 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.040714] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1521.040714] env[63241]: value = "task-1820320" [ 1521.040714] env[63241]: _type = "Task" [ 1521.040714] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.050924] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820320, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.197566] env[63241]: DEBUG oslo_concurrency.lockutils [req-7275464a-29f5-493c-926d-f5f9de26fdc5 req-b6377ddd-7605-4115-904a-65c7c560e04e service nova] Releasing lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.199815] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d1493b-cf78-4792-b6eb-deee44d597cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.209309] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7977419-30f4-4cb3-a778-7ad7e99ebdcf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.245786] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95018c75-425c-46e4-b060-26a52578d89e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.255206] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a499280d-6afe-4e15-a3fa-26b1a6243e7e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.271324] env[63241]: DEBUG nova.compute.provider_tree [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1521.309494] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820318, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064648} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.309851] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1521.311040] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1e73fa-257b-47d3-b552-98df89b20b00 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.316141] env[63241]: INFO nova.compute.manager [-] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Took 1.61 seconds to deallocate network for instance. 
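The recurring "Waiting for the task: (returnval){ value = task-..., _type = Task } to complete" entries followed by "progress is N%" and "completed successfully" lines come from oslo_vmware.api's wait_for_task/_poll_task loop, which repeatedly reads the vCenter task state until it reaches a terminal state. The snippet below is a minimal, generic sketch of that poll-until-done pattern for illustration only; it is not the oslo.vmware implementation, and `get_task_info`, `poll_interval`, and `timeout` are hypothetical stand-ins.

```python
import time


def wait_for_vcenter_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it reaches a terminal state.

    get_task_info: callable returning an object with .state ('running',
    'success' or 'error'), .progress (int percent) and .error (message).
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"vCenter task failed: {info.error}")
        # Mirrors the "progress is N%" lines the driver logs while waiting.
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("vCenter task did not complete in time")
```

In the log the same mechanism covers CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task and the rest; only the task object being polled differs.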
[ 1521.336079] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 3dfeaf57-2244-418e-a04a-ed4143e454d5/3dfeaf57-2244-418e-a04a-ed4143e454d5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1521.339710] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b84c384-a8b8-4eeb-8da5-af7e6f5bdb1b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.364206] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1521.364206] env[63241]: value = "task-1820321" [ 1521.364206] env[63241]: _type = "Task" [ 1521.364206] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.374158] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820321, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.462520] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820319, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.539601] env[63241]: DEBUG nova.network.neutron [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1521.554576] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820320, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.774793] env[63241]: DEBUG nova.scheduler.client.report [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1521.788874] env[63241]: DEBUG nova.network.neutron [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Updating instance_info_cache with network_info: [{"id": "dcdf6593-f699-4bf0-8fa5-16a49caabae8", "address": "fa:16:3e:1d:eb:0a", "network": {"id": "a8367b18-022c-41b4-8c92-d1415c31263d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2039791152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fedeb3768ebc4b96bd5a85bfb0a03cf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcdf6593-f6", "ovs_interfaceid": "dcdf6593-f699-4bf0-8fa5-16a49caabae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.858149] env[63241]: DEBUG oslo_concurrency.lockutils [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.874021] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820321, 'name': ReconfigVM_Task, 'duration_secs': 0.355659} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.874401] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 3dfeaf57-2244-418e-a04a-ed4143e454d5/3dfeaf57-2244-418e-a04a-ed4143e454d5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1521.875023] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b1c69ac-5ca7-42ff-8326-360fec93cf91 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.882300] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1521.882300] env[63241]: value = "task-1820322" [ 1521.882300] env[63241]: _type = "Task" [ 1521.882300] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.891716] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820322, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.963303] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523803} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.963303] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1521.963530] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1521.963715] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-055f83d3-1121-45c6-bcb8-027fb1621437 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.974058] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1521.974058] env[63241]: value = "task-1820323" [ 1521.974058] env[63241]: _type = "Task" [ 1521.974058] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.985139] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820323, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.052470] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820320, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.784287} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.052916] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce/965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1522.053093] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1522.053454] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53db21b7-a5d8-44da-ac66-c4be99a5215d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.057901] env[63241]: DEBUG nova.compute.manager [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Received event network-vif-plugged-dcdf6593-f699-4bf0-8fa5-16a49caabae8 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.058136] env[63241]: DEBUG oslo_concurrency.lockutils [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] Acquiring lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.058294] env[63241]: DEBUG oslo_concurrency.lockutils [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] Lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.058506] env[63241]: DEBUG oslo_concurrency.lockutils [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] Lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.058704] env[63241]: DEBUG nova.compute.manager [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] No waiting events found dispatching network-vif-plugged-dcdf6593-f699-4bf0-8fa5-16a49caabae8 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1522.058906] env[63241]: WARNING nova.compute.manager [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Received unexpected event network-vif-plugged-dcdf6593-f699-4bf0-8fa5-16a49caabae8 for instance with vm_state building and task_state 
spawning. [ 1522.059142] env[63241]: DEBUG nova.compute.manager [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Received event network-changed-dcdf6593-f699-4bf0-8fa5-16a49caabae8 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.059361] env[63241]: DEBUG nova.compute.manager [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Refreshing instance network info cache due to event network-changed-dcdf6593-f699-4bf0-8fa5-16a49caabae8. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1522.059600] env[63241]: DEBUG oslo_concurrency.lockutils [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] Acquiring lock "refresh_cache-c7b034f7-1d7f-4782-9ecb-5987c35339cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.065656] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1522.065656] env[63241]: value = "task-1820324" [ 1522.065656] env[63241]: _type = "Task" [ 1522.065656] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.080971] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820324, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.281153] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.080s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.283434] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.378s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.284971] env[63241]: INFO nova.compute.claims [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1522.292027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Releasing lock "refresh_cache-c7b034f7-1d7f-4782-9ecb-5987c35339cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.292226] env[63241]: DEBUG nova.compute.manager [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Instance network_info: |[{"id": "dcdf6593-f699-4bf0-8fa5-16a49caabae8", "address": "fa:16:3e:1d:eb:0a", "network": {"id": "a8367b18-022c-41b4-8c92-d1415c31263d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2039791152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fedeb3768ebc4b96bd5a85bfb0a03cf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcdf6593-f6", "ovs_interfaceid": "dcdf6593-f699-4bf0-8fa5-16a49caabae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1522.292522] env[63241]: DEBUG oslo_concurrency.lockutils [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] Acquired lock "refresh_cache-c7b034f7-1d7f-4782-9ecb-5987c35339cc" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.292710] env[63241]: DEBUG nova.network.neutron [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Refreshing network info cache for port dcdf6593-f699-4bf0-8fa5-16a49caabae8 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1522.293943] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:eb:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcdf6593-f699-4bf0-8fa5-16a49caabae8', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1522.302168] env[63241]: DEBUG oslo.service.loopingcall [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1522.303267] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1522.304325] env[63241]: INFO nova.scheduler.client.report [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Deleted allocations for instance 3c51d4dc-5a2c-4483-9aa5-8bab532971d4 [ 1522.305180] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f365cd9a-4aaa-46ca-b174-be4256dbe410 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.339128] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1522.339128] env[63241]: value = "task-1820325" [ 1522.339128] env[63241]: _type = "Task" [ 1522.339128] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.354269] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820325, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.395851] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820322, 'name': Rename_Task, 'duration_secs': 0.192053} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.396376] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1522.396753] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ed0928e-4d10-4f7f-ba6b-30421dbecdd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.405158] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1522.405158] env[63241]: value = "task-1820326" [ 1522.405158] env[63241]: _type = "Task" [ 1522.405158] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.416036] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820326, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.431693] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquiring lock "780f3eee-f6c7-4054-8e6e-a370f74dc405" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.431935] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "780f3eee-f6c7-4054-8e6e-a370f74dc405" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.432179] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquiring lock "780f3eee-f6c7-4054-8e6e-a370f74dc405-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.432376] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "780f3eee-f6c7-4054-8e6e-a370f74dc405-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.432665] env[63241]: DEBUG 
oslo_concurrency.lockutils [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "780f3eee-f6c7-4054-8e6e-a370f74dc405-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.434923] env[63241]: INFO nova.compute.manager [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Terminating instance [ 1522.437601] env[63241]: DEBUG nova.compute.manager [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1522.437799] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1522.438650] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ffb6d6-d772-4332-a390-6d9a7c74e9bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.447218] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1522.447499] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2442f271-c239-4d32-aa9b-b0444c9fab23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.458037] env[63241]: DEBUG oslo_vmware.api [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1522.458037] env[63241]: value = "task-1820327" [ 1522.458037] env[63241]: _type = "Task" [ 1522.458037] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.468620] env[63241]: DEBUG oslo_vmware.api [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1820327, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.483821] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820323, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09899} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.484120] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1522.484945] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320cf78f-5a45-47a2-860c-644d0e4db673 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.509632] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1522.511056] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-757a22b1-9177-448a-b976-7f13c7b27d51 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.531546] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1522.531546] env[63241]: value = "task-1820328" [ 1522.531546] env[63241]: _type = "Task" [ 1522.531546] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.540455] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820328, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.581401] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820324, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077048} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.581732] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1522.582582] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42450748-650e-46ed-a5ee-76282318df13 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.609591] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce/965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1522.610078] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27636d2d-6fd4-4c3a-9746-255b88e94f8f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.632937] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1522.632937] env[63241]: value = "task-1820329" [ 1522.632937] env[63241]: _type = "Task" [ 1522.632937] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.644874] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820329, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.827052] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6baf993d-555d-47b4-ae52-fa53aacacdcb tempest-ServersTestManualDisk-1548747611 tempest-ServersTestManualDisk-1548747611-project-member] Lock "3c51d4dc-5a2c-4483-9aa5-8bab532971d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.948s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.850598] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820325, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.921142] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820326, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.969130] env[63241]: DEBUG oslo_vmware.api [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1820327, 'name': PowerOffVM_Task, 'duration_secs': 0.271171} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.969489] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1522.969669] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1522.969935] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7eee962-5a6d-4b21-994c-3d9d226a2bf1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.042807] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820328, 'name': ReconfigVM_Task, 'duration_secs': 0.463842} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.043146] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Reconfigured VM instance instance-00000006 to attach disk [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1523.044106] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d0cc1eb-e058-41ea-aef3-6c05bdfc1c4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.051825] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1523.051825] env[63241]: value = "task-1820331" [ 1523.051825] env[63241]: _type = "Task" [ 1523.051825] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.061186] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820331, 'name': Rename_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.076946] env[63241]: DEBUG oslo_concurrency.lockutils [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "81854e13-e0c1-43a9-8529-678d56d57bbf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.077284] env[63241]: DEBUG oslo_concurrency.lockutils [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "81854e13-e0c1-43a9-8529-678d56d57bbf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.077517] env[63241]: DEBUG oslo_concurrency.lockutils [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "81854e13-e0c1-43a9-8529-678d56d57bbf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.077749] env[63241]: DEBUG oslo_concurrency.lockutils [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "81854e13-e0c1-43a9-8529-678d56d57bbf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.077933] env[63241]: DEBUG oslo_concurrency.lockutils [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "81854e13-e0c1-43a9-8529-678d56d57bbf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.080695] env[63241]: INFO nova.compute.manager [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Terminating instance [ 1523.085378] env[63241]: DEBUG nova.compute.manager [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1523.085722] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1523.086797] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f8aad3-0c71-44b3-b585-5ef4c4bcb2af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.096118] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1523.096405] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-390989e6-f279-4d94-8b59-362df711bbbb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.111131] env[63241]: DEBUG oslo_vmware.api [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1523.111131] env[63241]: value = "task-1820332" [ 1523.111131] env[63241]: _type = "Task" [ 1523.111131] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.123458] env[63241]: DEBUG oslo_vmware.api [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.141291] env[63241]: DEBUG nova.network.neutron [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Updated VIF entry in instance network info cache for port dcdf6593-f699-4bf0-8fa5-16a49caabae8. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1523.141658] env[63241]: DEBUG nova.network.neutron [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Updating instance_info_cache with network_info: [{"id": "dcdf6593-f699-4bf0-8fa5-16a49caabae8", "address": "fa:16:3e:1d:eb:0a", "network": {"id": "a8367b18-022c-41b4-8c92-d1415c31263d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2039791152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fedeb3768ebc4b96bd5a85bfb0a03cf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcdf6593-f6", "ovs_interfaceid": "dcdf6593-f699-4bf0-8fa5-16a49caabae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.153041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "bef91c1c-a418-4464-ae7b-883ffb7e9695" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.153041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "bef91c1c-a418-4464-ae7b-883ffb7e9695" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.153041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "bef91c1c-a418-4464-ae7b-883ffb7e9695-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.153041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "bef91c1c-a418-4464-ae7b-883ffb7e9695-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.153041] env[63241]: DEBUG 
oslo_concurrency.lockutils [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "bef91c1c-a418-4464-ae7b-883ffb7e9695-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.153041] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820329, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.153489] env[63241]: INFO nova.compute.manager [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Terminating instance [ 1523.155050] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1523.155268] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1523.155451] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Deleting the datastore file [datastore1] 780f3eee-f6c7-4054-8e6e-a370f74dc405 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1523.156499] env[63241]: DEBUG nova.compute.manager [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1523.156715] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1523.156963] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25f00a72-2a1b-4c0b-a767-8aeb1791e4eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.159459] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c617191-8948-4af6-b619-5364717c5a1c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.167639] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1523.169171] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85195ebe-dd68-4c3f-8eaa-8c32db21b29c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.170560] env[63241]: DEBUG oslo_vmware.api [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for the task: (returnval){ [ 1523.170560] env[63241]: value = "task-1820333" [ 1523.170560] env[63241]: _type = "Task" [ 1523.170560] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.182148] env[63241]: DEBUG oslo_vmware.api [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1820333, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.184475] env[63241]: DEBUG oslo_vmware.api [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1523.184475] env[63241]: value = "task-1820334" [ 1523.184475] env[63241]: _type = "Task" [ 1523.184475] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.193131] env[63241]: DEBUG oslo_vmware.api [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820334, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.352029] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820325, 'name': CreateVM_Task, 'duration_secs': 0.913673} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.354812] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1523.355870] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.356051] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.356364] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1523.356661] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c0672dd-cca3-4cb0-9d0d-e8b80c46e0c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.363477] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1523.363477] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522bfdda-1ec2-f619-7178-aa6c34c70491" [ 1523.363477] env[63241]: _type = "Task" [ 1523.363477] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.377752] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522bfdda-1ec2-f619-7178-aa6c34c70491, 'name': SearchDatastore_Task, 'duration_secs': 0.011768} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.378096] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.378334] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1523.378683] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.378811] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.379041] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1523.379340] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82c4a456-b467-4303-9825-fd83260cf0bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.392579] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1523.392796] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1523.396411] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e02f6ae-4385-466c-9597-ce08987e114c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.403447] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1523.403447] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ba6f7b-ecf8-28f8-c6fc-970c3062a9b0" [ 1523.403447] env[63241]: _type = "Task" [ 1523.403447] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.417319] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ba6f7b-ecf8-28f8-c6fc-970c3062a9b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.422097] env[63241]: DEBUG oslo_vmware.api [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820326, 'name': PowerOnVM_Task, 'duration_secs': 0.534544} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.422753] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1523.422753] env[63241]: INFO nova.compute.manager [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Took 8.81 seconds to spawn the instance on the hypervisor. [ 1523.422847] env[63241]: DEBUG nova.compute.manager [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1523.423675] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d6697b-8951-4899-b898-e7146d51189e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.562877] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820331, 'name': Rename_Task, 'duration_secs': 0.219665} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.565329] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1523.565885] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c4f8c70-9f98-4f2b-87db-39005ee1b34c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.572665] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1523.572665] env[63241]: value = "task-1820335" [ 1523.572665] env[63241]: _type = "Task" [ 1523.572665] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.583419] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820335, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.624217] env[63241]: DEBUG oslo_vmware.api [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820332, 'name': PowerOffVM_Task, 'duration_secs': 0.277737} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.624532] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1523.624730] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1523.625015] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a792dc1-a453-45a2-ab4f-c600b2160627 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.645103] env[63241]: DEBUG oslo_concurrency.lockutils [req-4fc2c366-e8c9-4afb-8306-a1f794878774 req-a339e268-f42f-497d-a277-3a332b909823 service nova] Releasing lock "refresh_cache-c7b034f7-1d7f-4782-9ecb-5987c35339cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.645539] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820329, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.685762] env[63241]: DEBUG oslo_vmware.api [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Task: {'id': task-1820333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27019} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.688926] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1523.689168] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1523.689373] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1523.689576] env[63241]: INFO nova.compute.manager [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1523.689852] env[63241]: DEBUG oslo.service.loopingcall [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1523.692347] env[63241]: DEBUG nova.compute.manager [-] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1523.692461] env[63241]: DEBUG nova.network.neutron [-] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1523.699602] env[63241]: DEBUG oslo_vmware.api [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820334, 'name': PowerOffVM_Task, 'duration_secs': 0.233435} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.699832] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1523.700035] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1523.700259] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3494812-d11d-441f-830c-4c939598697c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.755084] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae423e6-1cc1-4c62-b2e8-dbc6e1fa6030 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.764666] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bbd8d5-2acd-4259-b4eb-2599c2a88806 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.807429] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ae5a25-4ee9-4c4f-9c64-ffabd01fd046 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.813932] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1523.814168] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1523.814350] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Deleting the datastore file [datastore1] bef91c1c-a418-4464-ae7b-883ffb7e9695 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1523.814621] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1523.814792] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 
tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1523.816245] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Deleting the datastore file [datastore1] 81854e13-e0c1-43a9-8529-678d56d57bbf {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1523.816515] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d7287ab-305d-4be9-a231-98bb25372c03 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.818520] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-419cf201-99f6-494f-96e8-fa9293eab19f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.827585] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0027a39e-0011-4d63-bc00-9f12cd015fea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.831941] env[63241]: DEBUG oslo_vmware.api [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1523.831941] env[63241]: value = "task-1820339" [ 1523.831941] env[63241]: _type = "Task" [ 1523.831941] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.833435] env[63241]: DEBUG oslo_vmware.api [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for the task: (returnval){ [ 1523.833435] env[63241]: value = "task-1820338" [ 1523.833435] env[63241]: _type = "Task" [ 1523.833435] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.848839] env[63241]: DEBUG nova.compute.provider_tree [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1523.855943] env[63241]: DEBUG oslo_vmware.api [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820339, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.859043] env[63241]: DEBUG oslo_vmware.api [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820338, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.915281] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ba6f7b-ecf8-28f8-c6fc-970c3062a9b0, 'name': SearchDatastore_Task, 'duration_secs': 0.018981} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.915798] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d9e1f1c-6c30-423d-8cde-1612b5c68ec5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.922542] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1523.922542] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e02d0f-b390-e546-3bf7-483618e9fff4" [ 1523.922542] env[63241]: _type = "Task" [ 1523.922542] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.931455] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e02d0f-b390-e546-3bf7-483618e9fff4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.946023] env[63241]: INFO nova.compute.manager [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Took 46.11 seconds to build instance. [ 1524.094239] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820335, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.145817] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820329, 'name': ReconfigVM_Task, 'duration_secs': 1.359087} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.146196] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce/965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1524.146902] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0194128-6c29-4791-8751-3d82413c7bcd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.154438] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1524.154438] env[63241]: value = "task-1820340" [ 1524.154438] env[63241]: _type = "Task" [ 1524.154438] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.167056] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820340, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.357183] env[63241]: DEBUG nova.scheduler.client.report [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1524.361358] env[63241]: DEBUG oslo_vmware.api [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820339, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281882} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.365204] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1524.365512] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1524.365699] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1524.365885] env[63241]: INFO nova.compute.manager [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1524.366273] env[63241]: DEBUG oslo.service.loopingcall [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.367251] env[63241]: DEBUG oslo_vmware.api [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Task: {'id': task-1820338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282528} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.367488] env[63241]: DEBUG nova.compute.manager [-] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1524.367593] env[63241]: DEBUG nova.network.neutron [-] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1524.370107] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1524.370325] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1524.370975] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1524.371203] env[63241]: INFO nova.compute.manager [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1524.371500] env[63241]: DEBUG oslo.service.loopingcall [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.371809] env[63241]: DEBUG nova.compute.manager [-] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1524.371905] env[63241]: DEBUG nova.network.neutron [-] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1524.440439] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e02d0f-b390-e546-3bf7-483618e9fff4, 'name': SearchDatastore_Task, 'duration_secs': 0.011763} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.440439] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.440439] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c7b034f7-1d7f-4782-9ecb-5987c35339cc/c7b034f7-1d7f-4782-9ecb-5987c35339cc.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1524.440439] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31edd662-152e-4749-a9aa-e003eb6eec69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.446677] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b226b4e7-d514-400a-8559-93670d63c6f1 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "3dfeaf57-2244-418e-a04a-ed4143e454d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.509s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.452021] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1524.452021] env[63241]: value = "task-1820341" [ 1524.452021] env[63241]: _type = "Task" [ 1524.452021] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.460677] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820341, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.587454] env[63241]: DEBUG oslo_vmware.api [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820335, 'name': PowerOnVM_Task, 'duration_secs': 0.729634} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.587792] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1524.588047] env[63241]: DEBUG nova.compute.manager [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1524.589193] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058353bc-2a1b-42d4-9871-1aa716969cbd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.667353] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820340, 'name': Rename_Task, 'duration_secs': 0.265901} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.667353] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1524.667353] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44ec039b-a371-45f3-9c0a-ee051b6fb551 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.675187] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1524.675187] env[63241]: value = "task-1820342" [ 1524.675187] env[63241]: _type = "Task" [ 1524.675187] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.686668] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820342, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.863150] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.870023] env[63241]: DEBUG nova.compute.manager [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1524.870466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.340s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.872150] env[63241]: INFO nova.compute.claims [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.964609] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820341, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.985292] env[63241]: DEBUG nova.compute.manager [req-6798a568-329a-47df-b647-ef4e551f8790 req-2bff7c65-2671-4564-b1b7-eb2de3c08632 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Received event network-vif-deleted-6d8f94d0-98d7-4846-98e1-76924fe639d0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1524.985292] env[63241]: INFO nova.compute.manager [req-6798a568-329a-47df-b647-ef4e551f8790 req-2bff7c65-2671-4564-b1b7-eb2de3c08632 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Neutron deleted interface 6d8f94d0-98d7-4846-98e1-76924fe639d0; detaching it from the instance and deleting it from the info cache [ 1524.985292] env[63241]: DEBUG nova.network.neutron [req-6798a568-329a-47df-b647-ef4e551f8790 req-2bff7c65-2671-4564-b1b7-eb2de3c08632 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.006031] env[63241]: DEBUG nova.compute.manager [req-317ee820-e4f9-46fb-9e4f-c6c64f3a5028 req-88e15cac-717c-4a5c-af68-f2431f1a5c49 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Received event network-vif-deleted-39b9ee92-fa8c-4018-be8f-6ad78d44a1a8 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1525.006031] env[63241]: INFO nova.compute.manager [req-317ee820-e4f9-46fb-9e4f-c6c64f3a5028 req-88e15cac-717c-4a5c-af68-f2431f1a5c49 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Neutron deleted interface 39b9ee92-fa8c-4018-be8f-6ad78d44a1a8; detaching it from the instance and deleting it from the info cache [ 1525.006031] env[63241]: DEBUG nova.network.neutron [req-317ee820-e4f9-46fb-9e4f-c6c64f3a5028 req-88e15cac-717c-4a5c-af68-f2431f1a5c49 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.111257] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.188943] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820342, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.287448] env[63241]: DEBUG nova.network.neutron [-] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.301304] env[63241]: DEBUG nova.network.neutron [-] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.389384] env[63241]: DEBUG nova.compute.utils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.389384] env[63241]: DEBUG nova.compute.manager [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1525.389384] env[63241]: DEBUG nova.network.neutron [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1525.428309] env[63241]: DEBUG nova.network.neutron [-] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.462572] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820341, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5721} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.462919] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c7b034f7-1d7f-4782-9ecb-5987c35339cc/c7b034f7-1d7f-4782-9ecb-5987c35339cc.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1525.463196] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1525.463516] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a900aca-4336-43b8-beb5-e518b256958c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.474562] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1525.474562] env[63241]: value = "task-1820343" [ 1525.474562] env[63241]: _type = "Task" [ 1525.474562] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.483737] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820343, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.485759] env[63241]: DEBUG nova.policy [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e059a98d8bad4cdcb7b0539e5c3b3c4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64dbd26d47ae4b84ade0fc6114be0112', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1525.489182] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7da6cdc-1794-463a-a2e6-26c475c703bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.499722] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12e3966-7a86-445a-a84e-1a3b7b560ac1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.512876] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd70a3c5-679e-41c5-9e5e-b5e4aa5c4acf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.524483] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1decd3f-802c-4c4b-8d95-928d5f1e683d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.547818] env[63241]: DEBUG nova.compute.manager [req-6798a568-329a-47df-b647-ef4e551f8790 req-2bff7c65-2671-4564-b1b7-eb2de3c08632 service nova] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Detach interface failed, port_id=6d8f94d0-98d7-4846-98e1-76924fe639d0, reason: Instance 81854e13-e0c1-43a9-8529-678d56d57bbf could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1525.570575] env[63241]: DEBUG nova.compute.manager [req-317ee820-e4f9-46fb-9e4f-c6c64f3a5028 req-88e15cac-717c-4a5c-af68-f2431f1a5c49 service nova] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Detach interface failed, port_id=39b9ee92-fa8c-4018-be8f-6ad78d44a1a8, reason: Instance 780f3eee-f6c7-4054-8e6e-a370f74dc405 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1525.687163] env[63241]: DEBUG oslo_vmware.api [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820342, 'name': PowerOnVM_Task, 'duration_secs': 0.769136} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.687560] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1525.687695] env[63241]: INFO nova.compute.manager [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Took 8.32 seconds to spawn the instance on the hypervisor. [ 1525.687876] env[63241]: DEBUG nova.compute.manager [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1525.689325] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389d2f87-72a3-4044-a928-0a5ba84c6706 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.750180] env[63241]: DEBUG oslo_concurrency.lockutils [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquiring lock "3dfeaf57-2244-418e-a04a-ed4143e454d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.750524] env[63241]: DEBUG oslo_concurrency.lockutils [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "3dfeaf57-2244-418e-a04a-ed4143e454d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.750747] env[63241]: DEBUG oslo_concurrency.lockutils [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquiring lock "3dfeaf57-2244-418e-a04a-ed4143e454d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.751331] env[63241]: DEBUG oslo_concurrency.lockutils [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "3dfeaf57-2244-418e-a04a-ed4143e454d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.751331] env[63241]: DEBUG oslo_concurrency.lockutils [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "3dfeaf57-2244-418e-a04a-ed4143e454d5-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.758636] env[63241]: INFO nova.compute.manager [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Terminating instance [ 1525.760744] env[63241]: DEBUG nova.compute.manager [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1525.760942] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1525.762137] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad2bedf-a926-43d5-ab62-f7df78dc4b07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.772364] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1525.772630] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62e1a560-2d8c-4756-a62f-b48ad62f4cfe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.780368] env[63241]: DEBUG oslo_vmware.api [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1525.780368] env[63241]: value = "task-1820344" [ 1525.780368] env[63241]: _type = "Task" [ 1525.780368] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.791467] env[63241]: INFO nova.compute.manager [-] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Took 1.42 seconds to deallocate network for instance. [ 1525.791832] env[63241]: DEBUG oslo_vmware.api [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820344, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.807643] env[63241]: INFO nova.compute.manager [-] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Took 2.12 seconds to deallocate network for instance. 
[ 1525.884827] env[63241]: DEBUG nova.compute.manager [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1525.932617] env[63241]: INFO nova.compute.manager [-] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Took 1.56 seconds to deallocate network for instance. [ 1525.986534] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820343, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070559} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.989312] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1525.990477] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83f2ab7-303a-4310-8a7f-0f740e9ece52 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.015233] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] c7b034f7-1d7f-4782-9ecb-5987c35339cc/c7b034f7-1d7f-4782-9ecb-5987c35339cc.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1526.017913] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a108f4a-015f-4129-b062-60e60874f7d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.040591] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1526.040591] env[63241]: value = "task-1820345" [ 1526.040591] env[63241]: _type = "Task" [ 1526.040591] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.049624] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820345, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.209701] env[63241]: INFO nova.compute.manager [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Took 46.80 seconds to build instance. [ 1526.287871] env[63241]: DEBUG nova.network.neutron [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Successfully created port: c486fc54-8deb-4aab-89c9-18d333e4490e {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1526.298891] env[63241]: DEBUG oslo_concurrency.lockutils [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.299230] env[63241]: DEBUG oslo_vmware.api [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820344, 'name': PowerOffVM_Task, 'duration_secs': 0.386262} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.299518] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1526.299710] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1526.299978] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-daf23519-c085-468c-b590-0bf8d834a47b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.314233] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.445227] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.474223] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8810f7a5-c698-4b07-bbe6-53ba53eb8826 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.482955] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec6b45b-bc6d-4cf3-840e-242e06e8f975 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.516949] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0a12bd-5218-4160-a4bd-5580b622d201 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.525933] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bf9da3-d690-421b-8707-3df8e0b469cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.540413] env[63241]: DEBUG nova.compute.provider_tree [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1526.550607] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820345, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.713576] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a607716d-38ca-436e-9a56-051af2e853a3 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.583s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.905992] env[63241]: DEBUG nova.compute.manager [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1526.936260] env[63241]: DEBUG 
nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1526.936260] env[63241]: DEBUG nova.virt.hardware [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1526.936260] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc10e96-462d-46ee-b899-4f8dc3b8165d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.946237] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600cf802-02d6-4363-8217-35aa4de78f99 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.046354] env[63241]: DEBUG nova.scheduler.client.report [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1527.056436] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820345, 'name': ReconfigVM_Task, 'duration_secs': 0.959053} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.057405] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Reconfigured VM instance instance-00000032 to attach disk [datastore1] c7b034f7-1d7f-4782-9ecb-5987c35339cc/c7b034f7-1d7f-4782-9ecb-5987c35339cc.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1527.057405] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=63241) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1527.058184] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-30e3c715-9c44-4ee8-9abf-ad808f1c728a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.067222] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1527.067222] env[63241]: value = "task-1820347" [ 1527.067222] env[63241]: _type = "Task" [ 1527.067222] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.080157] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820347, 'name': CreateVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.116629] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1527.116629] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1527.116629] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Deleting the datastore file [datastore1] 3dfeaf57-2244-418e-a04a-ed4143e454d5 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1527.116629] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28792b50-8e6d-4fb8-899e-7c056251a13c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.125221] env[63241]: DEBUG oslo_vmware.api [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for the task: (returnval){ [ 1527.125221] env[63241]: value = "task-1820348" [ 1527.125221] env[63241]: _type = "Task" [ 1527.125221] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.143071] env[63241]: DEBUG oslo_vmware.api [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820348, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.196299] env[63241]: INFO nova.compute.manager [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Rebuilding instance [ 1527.237757] env[63241]: DEBUG nova.compute.manager [req-fbb1d90c-a80d-43a1-b1dc-3f41ec5b493b req-0382bd5b-906f-4745-b5a8-41bf34858402 service nova] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Received event network-vif-deleted-e6a64330-a394-41bb-9270-490bd00a4bf4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1527.251098] env[63241]: DEBUG nova.compute.manager [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1527.251098] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236da900-6261-421f-95bb-daefce936ce1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.552451] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.682s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.553118] env[63241]: DEBUG nova.compute.manager [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1527.555751] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.076s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.556876] env[63241]: DEBUG nova.objects.instance [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lazy-loading 'resources' on Instance uuid 0b7c72e0-79b9-4435-9676-7a0e9afaf936 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1527.582803] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820347, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.067821} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.583021] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=63241) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1527.583813] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39b38d4-33a5-4327-9b95-bc3211be7ec0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.610881] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] c7b034f7-1d7f-4782-9ecb-5987c35339cc/ephemeral_0.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1527.611859] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82299cf8-4b58-4a36-a518-5348731dc9d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.632746] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1527.632746] env[63241]: value = "task-1820349" [ 1527.632746] env[63241]: _type = "Task" [ 1527.632746] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.636135] env[63241]: DEBUG oslo_vmware.api [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Task: {'id': task-1820348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167977} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.640029] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1527.640167] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1527.640400] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1527.643163] env[63241]: INFO nova.compute.manager [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Took 1.88 seconds to destroy the instance on the hypervisor. [ 1527.643163] env[63241]: DEBUG oslo.service.loopingcall [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.643163] env[63241]: DEBUG nova.compute.manager [-] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1527.643163] env[63241]: DEBUG nova.network.neutron [-] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1527.652237] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820349, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.766650] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1527.767328] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc8ab7a4-728b-44b4-b724-c95eef1b2c4e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.776178] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1527.776178] env[63241]: value = "task-1820350" [ 1527.776178] env[63241]: _type = "Task" [ 1527.776178] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.798369] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.063166] env[63241]: DEBUG nova.compute.utils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1528.064906] env[63241]: DEBUG nova.compute.manager [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1528.065209] env[63241]: DEBUG nova.network.neutron [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1528.145416] env[63241]: DEBUG nova.policy [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad26e4512bbf4f8ca14cc2a405b349fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd455c7660f544a0ea156ce0f7aa3515c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1528.152431] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820349, 'name': ReconfigVM_Task, 'duration_secs': 0.398914} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.156568] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Reconfigured VM instance instance-00000032 to attach disk [datastore1] c7b034f7-1d7f-4782-9ecb-5987c35339cc/ephemeral_0.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1528.157656] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db147d81-d74c-470e-adec-6af24ab934d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.167896] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1528.167896] env[63241]: value = "task-1820351" [ 1528.167896] env[63241]: _type = "Task" [ 1528.167896] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.179472] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820351, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.288931] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820350, 'name': PowerOffVM_Task, 'duration_secs': 0.210631} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.289307] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1528.290083] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1528.292146] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe398bf-4796-4fff-95fd-62f0c005a0a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.300257] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1528.300577] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bdbb03e-97a5-4fd8-9795-c6da1a8b2d72 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.386345] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1528.386614] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1528.386876] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleting the datastore file [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1528.387252] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e97f6bc1-a087-4c67-8b2d-11f16f8dc2a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.404767] env[63241]: DEBUG nova.network.neutron [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab 
tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Successfully updated port: c486fc54-8deb-4aab-89c9-18d333e4490e {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1528.406160] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1528.406160] env[63241]: value = "task-1820353" [ 1528.406160] env[63241]: _type = "Task" [ 1528.406160] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.425739] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.572611] env[63241]: DEBUG nova.compute.manager [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1528.613728] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6312b9ec-26a8-4c67-abfb-7b8b1fe00752 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.619836] env[63241]: DEBUG nova.network.neutron [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Successfully created port: 4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1528.630415] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93cabc5-ba36-44c5-80fe-5661f694956b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.637818] env[63241]: DEBUG nova.compute.manager [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Stashing vm_state: active {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1528.679034] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe4eedc-c3d5-4a64-8c55-36812b993a74 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.690191] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820351, 'name': Rename_Task, 'duration_secs': 0.175928} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.691444] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1b1743-7714-41dd-8409-57aa75637636 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.695413] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1528.695993] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a995960-c23a-4421-8b80-b6d72a63125b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.707943] env[63241]: DEBUG nova.network.neutron [-] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.709426] env[63241]: DEBUG nova.compute.provider_tree [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1528.712018] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1528.712018] env[63241]: value = "task-1820354" [ 1528.712018] env[63241]: _type = "Task" [ 1528.712018] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.721566] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820354, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.910424] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquiring lock "refresh_cache-a534b054-2143-41c4-a0fa-028339ecdbbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.910424] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquired lock "refresh_cache-a534b054-2143-41c4-a0fa-028339ecdbbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.910424] env[63241]: DEBUG nova.network.neutron [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1528.922409] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13746} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.922409] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1528.922507] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1528.922649] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1529.178374] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.212831] env[63241]: INFO nova.compute.manager [-] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Took 1.57 seconds to deallocate network for instance. [ 1529.230731] env[63241]: DEBUG oslo_vmware.api [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820354, 'name': PowerOnVM_Task, 'duration_secs': 0.482093} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.230997] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1529.231279] env[63241]: INFO nova.compute.manager [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Took 8.68 seconds to spawn the instance on the hypervisor. [ 1529.232228] env[63241]: DEBUG nova.compute.manager [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1529.232421] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344f1fec-8791-4023-a1c2-41fe6356ea93 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.248573] env[63241]: ERROR nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [req-1e67084f-3c25-4ddf-9e2e-f74afe5294be] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1e67084f-3c25-4ddf-9e2e-f74afe5294be"}]} [ 1529.281017] env[63241]: DEBUG nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1529.289925] env[63241]: DEBUG nova.compute.manager [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Received event network-vif-plugged-c486fc54-8deb-4aab-89c9-18d333e4490e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1529.290147] env[63241]: DEBUG oslo_concurrency.lockutils [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] Acquiring lock "a534b054-2143-41c4-a0fa-028339ecdbbf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.290351] env[63241]: DEBUG oslo_concurrency.lockutils [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] Lock "a534b054-2143-41c4-a0fa-028339ecdbbf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.290563] env[63241]: DEBUG oslo_concurrency.lockutils [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] Lock "a534b054-2143-41c4-a0fa-028339ecdbbf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.290743] env[63241]: DEBUG nova.compute.manager [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] No waiting events found dispatching network-vif-plugged-c486fc54-8deb-4aab-89c9-18d333e4490e {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1529.290904] env[63241]: WARNING nova.compute.manager [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Received unexpected event network-vif-plugged-c486fc54-8deb-4aab-89c9-18d333e4490e for instance with vm_state building and task_state spawning. 
[ 1529.291072] env[63241]: DEBUG nova.compute.manager [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Received event network-vif-deleted-ef178c90-94df-45d1-97ca-ab5ef401691b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1529.291239] env[63241]: DEBUG nova.compute.manager [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Received event network-changed-c486fc54-8deb-4aab-89c9-18d333e4490e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1529.291388] env[63241]: DEBUG nova.compute.manager [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Refreshing instance network info cache due to event network-changed-c486fc54-8deb-4aab-89c9-18d333e4490e. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1529.291548] env[63241]: DEBUG oslo_concurrency.lockutils [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] Acquiring lock "refresh_cache-a534b054-2143-41c4-a0fa-028339ecdbbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.309018] env[63241]: DEBUG nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1529.309018] env[63241]: DEBUG nova.compute.provider_tree [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1529.326185] env[63241]: DEBUG nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1529.352689] env[63241]: DEBUG nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] 
Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1529.463739] env[63241]: DEBUG nova.network.neutron [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1529.583753] env[63241]: DEBUG nova.compute.manager [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1529.617736] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.617992] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.618166] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.618351] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.618499] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.618649] env[63241]: DEBUG 
nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.618857] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.619586] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.619872] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.620097] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.620322] env[63241]: DEBUG nova.virt.hardware [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.621244] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42477a14-298c-4318-8303-b4821ebe782a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.632786] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041b3245-ec0e-4b7c-94de-5eb21663d63e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.723534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.737505] env[63241]: DEBUG nova.network.neutron [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Updating instance_info_cache with network_info: [{"id": 
"c486fc54-8deb-4aab-89c9-18d333e4490e", "address": "fa:16:3e:d9:b0:0a", "network": {"id": "32b2a8f4-d409-44ee-b788-45067b7819b8", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-688965581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64dbd26d47ae4b84ade0fc6114be0112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc486fc54-8d", "ovs_interfaceid": "c486fc54-8deb-4aab-89c9-18d333e4490e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.760762] env[63241]: INFO nova.compute.manager [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Took 47.59 seconds to build instance. [ 1529.872420] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46188fb7-8c6b-4200-95dc-1dc99976be9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.887048] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3716947-ca6b-4332-a984-1256a94b0fa6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.923772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee61eb6-349b-4503-9ab9-21eb99355018 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.934114] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c7d231-670b-4094-9037-e75618d7fcd5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.954044] env[63241]: DEBUG nova.compute.provider_tree [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1529.967635] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 
tempest-ServersAdminTestJSON-675103814-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.967922] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.968420] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.968420] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.968420] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.969229] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.969528] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.969716] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.969901] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 
1529.970093] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.970281] env[63241]: DEBUG nova.virt.hardware [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.971772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4824e45-f964-43cd-ab9e-913f8e2d84d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.982645] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38db335-0b88-4362-b847-cdda8f36ada8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.998144] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:50:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '329d0e4b-4190-484a-8560-9356dc31beca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc8209bb-1cd1-4efc-806c-3fb04ffc73c5', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1530.004870] env[63241]: DEBUG oslo.service.loopingcall [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1530.005142] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1530.005358] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b017abdd-5326-421f-b893-aeae3b074370 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.026319] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1530.026319] env[63241]: value = "task-1820355" [ 1530.026319] env[63241]: _type = "Task" [ 1530.026319] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.037346] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820355, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.241342] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Releasing lock "refresh_cache-a534b054-2143-41c4-a0fa-028339ecdbbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.241724] env[63241]: DEBUG nova.compute.manager [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Instance network_info: |[{"id": "c486fc54-8deb-4aab-89c9-18d333e4490e", "address": "fa:16:3e:d9:b0:0a", "network": {"id": "32b2a8f4-d409-44ee-b788-45067b7819b8", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-688965581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64dbd26d47ae4b84ade0fc6114be0112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc486fc54-8d", "ovs_interfaceid": "c486fc54-8deb-4aab-89c9-18d333e4490e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1530.242125] env[63241]: DEBUG oslo_concurrency.lockutils [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] Acquired lock "refresh_cache-a534b054-2143-41c4-a0fa-028339ecdbbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.242347] env[63241]: DEBUG nova.network.neutron [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Refreshing network info cache for port c486fc54-8deb-4aab-89c9-18d333e4490e {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1530.243677] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:b0:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd177c5b3-a5b1-4c78-854e-7e0dbf341ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c486fc54-8deb-4aab-89c9-18d333e4490e', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1530.252267] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 
tempest-ImagesOneServerTestJSON-882626409-project-member] Creating folder: Project (64dbd26d47ae4b84ade0fc6114be0112). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1530.256229] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af54abdb-82b6-4b31-bd83-5c2505b5952b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.263210] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a94697bf-3f6a-4367-a8c5-a3760972733a tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.850s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.271233] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Created folder: Project (64dbd26d47ae4b84ade0fc6114be0112) in parent group-v376927. [ 1530.271894] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Creating folder: Instances. Parent ref: group-v377071. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1530.271894] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0adffa4f-52e0-49c5-8307-cba157c60a11 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.288254] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Created folder: Instances in parent group-v377071. [ 1530.288593] env[63241]: DEBUG oslo.service.loopingcall [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1530.288803] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1530.289113] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09ae98c8-9faa-47c7-83c4-f30e4dfb9f1d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.316882] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1530.316882] env[63241]: value = "task-1820358" [ 1530.316882] env[63241]: _type = "Task" [ 1530.316882] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.480157] env[63241]: ERROR nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] [req-ba59eb6b-e3e2-46b1-8362-3dae67fdd164] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ba59eb6b-e3e2-46b1-8362-3dae67fdd164"}]} [ 1530.499767] env[63241]: DEBUG nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1530.523779] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquiring lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.524279] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.528028] env[63241]: DEBUG nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1530.528028] env[63241]: DEBUG nova.compute.provider_tree [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 
1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1530.544467] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820355, 'name': CreateVM_Task, 'duration_secs': 0.375062} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.544643] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1530.545334] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.545498] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.545834] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.546117] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93c5fb0b-4683-4ab2-969f-9e4978eec010 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.549706] env[63241]: DEBUG nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1530.553289] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1530.553289] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5279cc9f-d64a-7027-79be-0f3b1d636c73" [ 1530.553289] env[63241]: _type = "Task" [ 1530.553289] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.561776] env[63241]: DEBUG nova.network.neutron [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Successfully updated port: 4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.574124] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5279cc9f-d64a-7027-79be-0f3b1d636c73, 'name': SearchDatastore_Task, 'duration_secs': 0.010912} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.575480] env[63241]: DEBUG nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1530.578235] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.578559] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.578835] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.578936] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.579136] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1530.579914] env[63241]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39080103-01db-44d4-ab51-d6dd793e1d57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.597021] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1530.597021] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1530.597021] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11aa8714-9906-4319-b33c-1cce460be155 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.602868] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1530.602868] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526afd2b-02a0-f38d-678f-2b3d39b7f104" [ 1530.602868] env[63241]: _type = "Task" [ 1530.602868] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.615381] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526afd2b-02a0-f38d-678f-2b3d39b7f104, 'name': SearchDatastore_Task, 'duration_secs': 0.010184} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.618938] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6778af93-94bf-4403-8e2a-da266fd32125 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.624968] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1530.624968] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52de48d6-969c-ed1b-a61a-b090e6851b95" [ 1530.624968] env[63241]: _type = "Task" [ 1530.624968] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.636222] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52de48d6-969c-ed1b-a61a-b090e6851b95, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.645983] env[63241]: DEBUG nova.network.neutron [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Updated VIF entry in instance network info cache for port c486fc54-8deb-4aab-89c9-18d333e4490e. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1530.646365] env[63241]: DEBUG nova.network.neutron [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Updating instance_info_cache with network_info: [{"id": "c486fc54-8deb-4aab-89c9-18d333e4490e", "address": "fa:16:3e:d9:b0:0a", "network": {"id": "32b2a8f4-d409-44ee-b788-45067b7819b8", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-688965581-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64dbd26d47ae4b84ade0fc6114be0112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc486fc54-8d", "ovs_interfaceid": "c486fc54-8deb-4aab-89c9-18d333e4490e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.665086] env[63241]: DEBUG nova.compute.manager [req-ce974d61-ef5b-4a59-9201-463535a6702b req-0fa14f87-c7eb-4626-8349-38a58b05b6e9 service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Received event network-vif-plugged-4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1530.665348] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce974d61-ef5b-4a59-9201-463535a6702b req-0fa14f87-c7eb-4626-8349-38a58b05b6e9 service nova] Acquiring lock "c3c278a8-0513-4a7f-881e-b71c70206860-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.668699] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce974d61-ef5b-4a59-9201-463535a6702b req-0fa14f87-c7eb-4626-8349-38a58b05b6e9 service nova] Lock "c3c278a8-0513-4a7f-881e-b71c70206860-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.668699] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce974d61-ef5b-4a59-9201-463535a6702b req-0fa14f87-c7eb-4626-8349-38a58b05b6e9 service nova] Lock "c3c278a8-0513-4a7f-881e-b71c70206860-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.668699] env[63241]: DEBUG nova.compute.manager [req-ce974d61-ef5b-4a59-9201-463535a6702b req-0fa14f87-c7eb-4626-8349-38a58b05b6e9 service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] No waiting events found dispatching network-vif-plugged-4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1530.668699] env[63241]: WARNING nova.compute.manager [req-ce974d61-ef5b-4a59-9201-463535a6702b req-0fa14f87-c7eb-4626-8349-38a58b05b6e9 service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Received unexpected event network-vif-plugged-4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1 for instance with vm_state building and task_state spawning. [ 1530.827954] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820358, 'name': CreateVM_Task, 'duration_secs': 0.486313} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.830706] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1530.834151] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.834151] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.834151] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.834151] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb1c382f-d862-4f4a-a30f-5dac9210fefb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.842018] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1530.842018] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527c89d4-d5ba-e2ab-0a92-f6791968d8b0" [ 1530.842018] env[63241]: _type = "Task" [ 1530.842018] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.856219] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527c89d4-d5ba-e2ab-0a92-f6791968d8b0, 'name': SearchDatastore_Task, 'duration_secs': 0.011466} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.856219] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.856219] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.856219] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.030494] env[63241]: DEBUG nova.compute.manager [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1531.078861] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquiring lock "refresh_cache-c3c278a8-0513-4a7f-881e-b71c70206860" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.079015] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquired lock "refresh_cache-c3c278a8-0513-4a7f-881e-b71c70206860" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.079166] env[63241]: DEBUG nova.network.neutron [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1531.082820] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdece6a5-a231-4c10-8416-58091e4ce5ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.097415] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe91adbe-bc8e-4759-b572-d57deea1b5de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.138710] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fa4950-a1cd-43a3-8864-f39919a3b88f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.153020] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52de48d6-969c-ed1b-a61a-b090e6851b95, 'name': SearchDatastore_Task, 'duration_secs': 0.009729} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.153020] env[63241]: DEBUG oslo_concurrency.lockutils [req-0eda972c-3edc-49d0-b1ff-5d3ab6098ae1 req-98e3374e-8daf-42da-a44a-b29231e25d00 service nova] Releasing lock "refresh_cache-a534b054-2143-41c4-a0fa-028339ecdbbf" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.153020] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.153020] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1531.153652] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992dfdbe-aa57-424c-95c6-6baf77eb3f09 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.159526] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.159526] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1531.159526] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-540d1b19-2a3f-4221-9620-cfc376fbc3b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.160042] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71746eb8-52c4-47e5-b53f-881317f03042 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.173216] env[63241]: DEBUG nova.compute.provider_tree [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1531.176896] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1531.176896] env[63241]: value = "task-1820359" [ 1531.176896] env[63241]: _type = "Task" [ 1531.176896] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.177103] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1531.177295] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1531.178311] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b04f5f5-cefb-44ec-9eb9-d17a93ed23ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.189319] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820359, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.190999] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1531.190999] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5229992c-0e5f-fa7b-7a16-067a93dd1859" [ 1531.190999] env[63241]: _type = "Task" [ 1531.190999] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.199934] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5229992c-0e5f-fa7b-7a16-067a93dd1859, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.562930] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.616093] env[63241]: DEBUG nova.network.neutron [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1531.693561] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820359, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52255} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.696959] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1531.697162] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1531.697446] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4f17c0e-8100-49a1-9f06-4dadafb8d20f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.706710] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5229992c-0e5f-fa7b-7a16-067a93dd1859, 'name': SearchDatastore_Task, 'duration_secs': 0.020587} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.708655] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1531.708655] env[63241]: value = "task-1820360" [ 1531.708655] env[63241]: _type = "Task" [ 1531.708655] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.708890] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d0e41ba-188e-4c3d-991a-49064edc527f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.712398] env[63241]: DEBUG nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1531.712647] env[63241]: DEBUG nova.compute.provider_tree [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 72 to 73 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1531.712868] env[63241]: DEBUG nova.compute.provider_tree [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1531.727024] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1531.727024] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524f24ce-b333-4067-16eb-fac9b5623926" [ 1531.727024] env[63241]: _type = "Task" [ 1531.727024] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.728267] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820360, 'name': ExtendVirtualDisk_Task} progress is 0%. 
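The inventory payload above, together with the generation bump from 72 to 73, shows how the scheduler report client keeps the local provider tree in step with Placement: push the full inventory dict, then record the new provider generation. Below is a hedged sketch of that bookkeeping; the inventory dict is copied from the log, while ProviderNode and its update_inventory method are illustrative stand-ins, not the nova provider_tree implementation.

# Inventory dict copied from the record above; ProviderNode is a stand-in.
INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 155,
                "step_size": 1, "allocation_ratio": 1.0},
}


class ProviderNode:
    """Minimal stand-in for one resource provider in the local tree."""

    def __init__(self, uuid, generation):
        self.uuid = uuid
        self.generation = generation
        self.inventory = {}

    def update_inventory(self, inventory, new_generation):
        """Store the pushed inventory and bump the generation, as logged above."""
        changed = inventory != self.inventory
        self.inventory = dict(inventory)
        self.generation = new_generation
        return changed


node = ProviderNode("9a5e30eb-ceae-4224-aa66-dcbfa98ce24b", generation=72)
print(node.update_inventory(INVENTORY, new_generation=73))  # True: inventory changed
print(node.generation)                                      # 73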
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.740023] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524f24ce-b333-4067-16eb-fac9b5623926, 'name': SearchDatastore_Task, 'duration_secs': 0.010434} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.740023] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.740023] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a534b054-2143-41c4-a0fa-028339ecdbbf/a534b054-2143-41c4-a0fa-028339ecdbbf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1531.740023] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e6cc62f-53c8-415a-8c2c-5407206b45e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.748155] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1531.748155] env[63241]: value = "task-1820361" [ 1531.748155] env[63241]: _type = "Task" [ 1531.748155] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.756867] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820361, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.844736] env[63241]: DEBUG nova.network.neutron [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Updating instance_info_cache with network_info: [{"id": "4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1", "address": "fa:16:3e:f3:d9:98", "network": {"id": "02397b3d-07e9-487e-bb7e-c929aedd9c5d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-739652610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d455c7660f544a0ea156ce0f7aa3515c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dabbba4-ee", "ovs_interfaceid": "4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.223574] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.668s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.225873] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820360, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071133} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.226611] env[63241]: DEBUG oslo_concurrency.lockutils [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.980s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.226887] env[63241]: DEBUG nova.objects.instance [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lazy-loading 'resources' on Instance uuid 5203c12e-14a0-4736-8185-8ead9a29b03b {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1532.228430] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1532.229672] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4adeb045-d824-4499-b398-afa960ba6c24 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.257162] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1532.258361] env[63241]: INFO nova.scheduler.client.report [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Deleted allocations for instance 0b7c72e0-79b9-4435-9676-7a0e9afaf936 [ 1532.259655] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42bf9e9d-b70c-4b93-b19b-f97b65a4da41 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.287445] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820361, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479932} completed successfully. 
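The "compute_resources" lock lines in this stretch (released after being held 4.668s by update_usage above, acquired here after waiting 36.980s) come from the named-lock wrapper that serializes the resource tracker. Below is a simplified, stdlib-only illustration of that acquire/waited/held accounting; real nova goes through oslo_concurrency.lockutils, and timed_lock here is just an assumption for this sketch.

import threading
import time
from contextlib import contextmanager

_LOCKS = {}
_REGISTRY_GUARD = threading.Lock()


@contextmanager
def timed_lock(name):
    """Acquire a named lock and report waited/held times, lockutils-style."""
    with _REGISTRY_GUARD:
        lock = _LOCKS.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')


with timed_lock("compute_resources"):
    time.sleep(0.01)  # stand-in for ResourceTracker.instance_claim/update_usage work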
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.288844] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a534b054-2143-41c4-a0fa-028339ecdbbf/a534b054-2143-41c4-a0fa-028339ecdbbf.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1532.289092] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1532.289417] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1532.289417] env[63241]: value = "task-1820362" [ 1532.289417] env[63241]: _type = "Task" [ 1532.289417] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.289640] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ecd1e1d0-68e5-4589-a464-893d875aa621 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.301687] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820362, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.303741] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1532.303741] env[63241]: value = "task-1820363" [ 1532.303741] env[63241]: _type = "Task" [ 1532.303741] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.312168] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820363, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.347128] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Releasing lock "refresh_cache-c3c278a8-0513-4a7f-881e-b71c70206860" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.347506] env[63241]: DEBUG nova.compute.manager [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Instance network_info: |[{"id": "4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1", "address": "fa:16:3e:f3:d9:98", "network": {"id": "02397b3d-07e9-487e-bb7e-c929aedd9c5d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-739652610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d455c7660f544a0ea156ce0f7aa3515c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dabbba4-ee", "ovs_interfaceid": "4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1532.347960] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:d9:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c80243e-93a7-4a95-bc8d-e9534bacd66e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1532.355946] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Creating folder: Project (d455c7660f544a0ea156ce0f7aa3515c). Parent ref: group-v376927. 
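The "Instance VIF info" list above is derived from the neutron network_info entry earlier in the same record: bridge, MAC address, NSX logical-switch id and port id are remapped into the fields the VMware driver hands to vCenter. A hedged sketch of that mapping follows; the trimmed network_info dict and the helper name are assumptions for illustration, not the driver's actual code.

# Trimmed network_info entry (fields taken from the record above).
network_info = [{
    "id": "4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1",
    "address": "fa:16:3e:f3:d9:98",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e"},
}]


def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    """Map one neutron VIF dict onto the fields the VMware driver passes on."""
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }


print([vif_info_from_network_info(v) for v in network_info])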
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1532.357039] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55b363b2-8e90-4421-89d2-fde73096efc5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.370239] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Created folder: Project (d455c7660f544a0ea156ce0f7aa3515c) in parent group-v376927. [ 1532.370570] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Creating folder: Instances. Parent ref: group-v377074. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1532.370873] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-919e3abe-cebb-4abb-b713-fac8a08a2c71 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.381205] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Created folder: Instances in parent group-v377074. [ 1532.381497] env[63241]: DEBUG oslo.service.loopingcall [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.381714] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1532.381958] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41648cc7-35e7-4a3a-8099-5af40192f982 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.403212] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1532.403212] env[63241]: value = "task-1820366" [ 1532.403212] env[63241]: _type = "Task" [ 1532.403212] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.413865] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820366, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.700312] env[63241]: DEBUG nova.compute.manager [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Received event network-changed-4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.700312] env[63241]: DEBUG nova.compute.manager [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Refreshing instance network info cache due to event network-changed-4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1532.700672] env[63241]: DEBUG oslo_concurrency.lockutils [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] Acquiring lock "refresh_cache-c3c278a8-0513-4a7f-881e-b71c70206860" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.700672] env[63241]: DEBUG oslo_concurrency.lockutils [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] Acquired lock "refresh_cache-c3c278a8-0513-4a7f-881e-b71c70206860" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.700778] env[63241]: DEBUG nova.network.neutron [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Refreshing network info cache for port 4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.785010] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bb0b8214-c2eb-4faf-a2d1-49fadd486a7f tempest-ServerGroupTestJSON-1793503604 tempest-ServerGroupTestJSON-1793503604-project-member] Lock "0b7c72e0-79b9-4435-9676-7a0e9afaf936" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.905s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.802172] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820362, 'name': ReconfigVM_Task, 'duration_secs': 0.306834} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.805365] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Reconfigured VM instance instance-00000006 to attach disk [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf/a1a8342a-b00e-42c1-8c01-a95659a78caf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1532.806274] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09f5b0d8-a2a9-4899-9f7e-f583d9f3cc17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.818539] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076858} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.822076] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1532.822442] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1532.822442] env[63241]: value = "task-1820367" [ 1532.822442] env[63241]: _type = "Task" [ 1532.822442] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.823300] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51ed58a-bd5e-4964-8069-cb232612a13f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.848040] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820367, 'name': Rename_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.857074] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] a534b054-2143-41c4-a0fa-028339ecdbbf/a534b054-2143-41c4-a0fa-028339ecdbbf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1532.859962] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-564c05bc-7b61-4bb4-8a81-af0ef7c35c65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.881798] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1532.881798] env[63241]: value = "task-1820368" [ 1532.881798] env[63241]: _type = "Task" [ 1532.881798] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.894161] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820368, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.915478] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820366, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.244722] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21076e0d-4726-4b35-a4cd-de5e19a6ccd8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.254135] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a333a581-ad1b-4b5a-975d-cd97832307fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.286812] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fe9f17-5110-447c-b5e9-cd6b2d438102 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.297122] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eab08d-69db-446c-87dc-e8c38df6bf0e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.314067] env[63241]: DEBUG nova.compute.provider_tree [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1533.340079] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820367, 'name': Rename_Task, 'duration_secs': 0.154164} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.340079] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1533.340079] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31cdeaa6-e2bc-4606-814b-d5ae98643615 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.350079] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1533.350079] env[63241]: value = "task-1820369" [ 1533.350079] env[63241]: _type = "Task" [ 1533.350079] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.359371] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820369, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.394114] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820368, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.414653] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820366, 'name': CreateVM_Task, 'duration_secs': 0.701113} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.414931] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1533.415751] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.415942] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.416482] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1533.416757] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feb3a4f1-6537-45f5-8b3a-d41433f7de2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.423705] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1533.423705] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e31133-80c2-c305-3786-d917d330bff6" [ 1533.423705] env[63241]: _type = "Task" [ 1533.423705] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.432857] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e31133-80c2-c305-3786-d917d330bff6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.826946] env[63241]: DEBUG nova.network.neutron [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Updated VIF entry in instance network info cache for port 4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1533.827794] env[63241]: DEBUG nova.network.neutron [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Updating instance_info_cache with network_info: [{"id": "4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1", "address": "fa:16:3e:f3:d9:98", "network": {"id": "02397b3d-07e9-487e-bb7e-c929aedd9c5d", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-739652610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d455c7660f544a0ea156ce0f7aa3515c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c80243e-93a7-4a95-bc8d-e9534bacd66e", "external-id": "nsx-vlan-transportzone-306", "segmentation_id": 306, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dabbba4-ee", "ovs_interfaceid": "4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.865452] env[63241]: DEBUG oslo_vmware.api [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820369, 'name': PowerOnVM_Task, 'duration_secs': 0.481898} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.867990] env[63241]: DEBUG nova.scheduler.client.report [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 73 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1533.867990] env[63241]: DEBUG nova.compute.provider_tree [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 73 to 74 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1533.867990] env[63241]: DEBUG nova.compute.provider_tree [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1533.870953] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1533.876754] env[63241]: DEBUG nova.compute.manager [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1533.876754] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d618da-1c3f-45c9-adb1-b5be389838a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.901472] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820368, 'name': ReconfigVM_Task, 'duration_secs': 0.637375} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.902926] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Reconfigured VM instance instance-00000033 to attach disk [datastore1] a534b054-2143-41c4-a0fa-028339ecdbbf/a534b054-2143-41c4-a0fa-028339ecdbbf.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1533.903324] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d42c5cfd-1182-48f4-90ed-3e8ed79f1913 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.913533] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1533.913533] env[63241]: value = "task-1820370" [ 1533.913533] env[63241]: _type = "Task" [ 1533.913533] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.926883] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820370, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.939892] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e31133-80c2-c305-3786-d917d330bff6, 'name': SearchDatastore_Task, 'duration_secs': 0.016813} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.941675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.941797] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1533.942544] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.942544] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.942544] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1533.943177] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10d823af-3169-4427-92b4-243a2dc429a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.959708] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1533.961128] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1533.962559] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-557c4d0d-538a-41de-8eff-516aa587c4c6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.973027] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1533.973027] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d7e7a6-cca1-4e3d-c262-4be3e3033269" [ 1533.973027] env[63241]: _type = "Task" [ 1533.973027] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.985745] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d7e7a6-cca1-4e3d-c262-4be3e3033269, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.332735] env[63241]: DEBUG oslo_concurrency.lockutils [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] Releasing lock "refresh_cache-c3c278a8-0513-4a7f-881e-b71c70206860" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.333025] env[63241]: DEBUG nova.compute.manager [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Received event network-changed-dcdf6593-f699-4bf0-8fa5-16a49caabae8 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1534.333189] env[63241]: DEBUG nova.compute.manager [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Refreshing instance network info cache due to event network-changed-dcdf6593-f699-4bf0-8fa5-16a49caabae8. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1534.333405] env[63241]: DEBUG oslo_concurrency.lockutils [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] Acquiring lock "refresh_cache-c7b034f7-1d7f-4782-9ecb-5987c35339cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.333554] env[63241]: DEBUG oslo_concurrency.lockutils [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] Acquired lock "refresh_cache-c7b034f7-1d7f-4782-9ecb-5987c35339cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.333716] env[63241]: DEBUG nova.network.neutron [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Refreshing network info cache for port dcdf6593-f699-4bf0-8fa5-16a49caabae8 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1534.379865] env[63241]: DEBUG oslo_concurrency.lockutils [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.153s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.383354] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.733s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.383767] env[63241]: DEBUG nova.objects.instance [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lazy-loading 'resources' on Instance uuid 5060e745-08d0-429e-8780-bfdad7a29f30 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1534.404233] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.409624] env[63241]: INFO nova.scheduler.client.report [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted allocations for instance 5203c12e-14a0-4736-8185-8ead9a29b03b [ 1534.429027] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820370, 'name': Rename_Task, 'duration_secs': 0.274194} completed successfully. 
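The req-d781690a records show the external-event path: neutron reports network-changed for a port, and the compute manager refreshes that instance's network info cache behind a per-instance refresh_cache lock so concurrent refreshes cannot interleave. A rough, self-contained sketch of that dispatch pattern follows; the function and variable names are illustrative only, not the compute manager API.

import threading
from collections import defaultdict

_refresh_locks = defaultdict(threading.Lock)   # one "refresh_cache-<uuid>" lock each
_nw_info_cache = {}                            # instance uuid -> network_info list


def handle_external_event(instance_uuid, event, fetch_nw_info):
    """Refresh the cached network_info when a network-changed event arrives."""
    if not event.startswith("network-changed-"):
        return
    port_id = event[len("network-changed-"):]
    print(f"Refreshing instance network info cache due to event {event}.")
    with _refresh_locks[instance_uuid]:
        _nw_info_cache[instance_uuid] = fetch_nw_info(instance_uuid, port_id)


handle_external_event(
    "c3c278a8-0513-4a7f-881e-b71c70206860",
    "network-changed-4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1",
    fetch_nw_info=lambda uuid, port: [{"id": port, "active": True}],
)
print(_nw_info_cache)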
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.429027] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1534.429027] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee4de7e6-edfd-4a4f-a4fa-e7c3a430ffae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.438282] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1534.438282] env[63241]: value = "task-1820371" [ 1534.438282] env[63241]: _type = "Task" [ 1534.438282] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.449184] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820371, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.488030] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d7e7a6-cca1-4e3d-c262-4be3e3033269, 'name': SearchDatastore_Task, 'duration_secs': 0.013091} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.488312] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56556d2d-44f1-41e6-a338-7e070bea0485 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.498016] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1534.498016] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52dd64e0-4f07-ec50-b7d4-0f558bfab977" [ 1534.498016] env[63241]: _type = "Task" [ 1534.498016] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.510793] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dd64e0-4f07-ec50-b7d4-0f558bfab977, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.920512] env[63241]: DEBUG oslo_concurrency.lockutils [None req-389c1d7d-cf9c-4f0d-a3f7-97c3b94c7e74 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "5203c12e-14a0-4736-8185-8ead9a29b03b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.016s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.952691] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820371, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.014090] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dd64e0-4f07-ec50-b7d4-0f558bfab977, 'name': SearchDatastore_Task, 'duration_secs': 0.015892} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.014397] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.014663] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c3c278a8-0513-4a7f-881e-b71c70206860/c3c278a8-0513-4a7f-881e-b71c70206860.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1535.015966] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27f2ae71-5eb9-4fce-b457-662b939d214a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.025458] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1535.025458] env[63241]: value = "task-1820372" [ 1535.025458] env[63241]: _type = "Task" [ 1535.025458] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.041870] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820372, 'name': CopyVirtualDisk_Task} progress is 0%. 
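Every spawn in this log reuses the same cached image (e128f8d9-813d-4846-9a6e-b4c4717cd5b4) out of [datastore1] devstack-image-cache_base: the cache path is locked, a SearchDatastore_Task confirms the vmdk is present, and only then is it copied to a per-instance vmdk, as task-1820372 does above for c3c278a8-0513-4a7f-881e-b71c70206860. A simplified sketch of that fetch-if-missing-then-copy flow follows; every helper here is faked for illustration and none of it is the nova _fetch_image_if_missing code.

import threading

_cache_lock = threading.Lock()   # stands in for the per-image datastore lock
_datastore = set()               # pretend datastore contents


def fetch_image_if_missing(image_id, download):
    """Ensure <image>/<image>.vmdk exists in the cache, downloading it only once."""
    cache_path = f"[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    with _cache_lock:
        if cache_path not in _datastore:   # SearchDatastore_Task found nothing
            download(cache_path)
            _datastore.add(cache_path)
    return cache_path


def copy_to_instance(image_id, instance_uuid):
    """Copy the cached vmdk to the per-instance path (CopyVirtualDisk_Task)."""
    src = fetch_image_if_missing(
        image_id, download=lambda path: print(f"Downloading image to {path}"))
    dst = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"
    print(f"Copying Virtual Disk {src} to {dst}")
    _datastore.add(dst)
    return dst


copy_to_instance("e128f8d9-813d-4846-9a6e-b4c4717cd5b4",
                 "c3c278a8-0513-4a7f-881e-b71c70206860")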
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.286297] env[63241]: DEBUG nova.network.neutron [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Updated VIF entry in instance network info cache for port dcdf6593-f699-4bf0-8fa5-16a49caabae8. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1535.286831] env[63241]: DEBUG nova.network.neutron [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Updating instance_info_cache with network_info: [{"id": "dcdf6593-f699-4bf0-8fa5-16a49caabae8", "address": "fa:16:3e:1d:eb:0a", "network": {"id": "a8367b18-022c-41b4-8c92-d1415c31263d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2039791152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fedeb3768ebc4b96bd5a85bfb0a03cf8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcdf6593-f6", "ovs_interfaceid": "dcdf6593-f699-4bf0-8fa5-16a49caabae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.438328] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08588aa-ad9a-4e05-9ca8-06124247adee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.456890] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1ffb5e-bb00-4a61-a149-fd278ed2f256 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.460651] env[63241]: DEBUG oslo_vmware.api [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820371, 'name': PowerOnVM_Task, 'duration_secs': 0.933583} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.461028] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1535.461204] env[63241]: INFO nova.compute.manager [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Took 8.56 seconds to spawn the instance on the hypervisor. [ 1535.461385] env[63241]: DEBUG nova.compute.manager [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1535.462854] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3339da95-46c8-43de-883a-3b43c24eda04 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.494318] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c4ac60-bc99-46e4-bca4-f7f84611e218 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.507649] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08bfbfc7-4a04-491d-a7c7-6f87fc35e47d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.523785] env[63241]: DEBUG nova.compute.provider_tree [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1535.536014] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820372, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.552142] env[63241]: DEBUG oslo_concurrency.lockutils [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "e753da08-d4a5-4f17-85c8-154e843798c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.552726] env[63241]: DEBUG oslo_concurrency.lockutils [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "e753da08-d4a5-4f17-85c8-154e843798c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.554534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "e753da08-d4a5-4f17-85c8-154e843798c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.554534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "e753da08-d4a5-4f17-85c8-154e843798c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.554534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "e753da08-d4a5-4f17-85c8-154e843798c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.558938] env[63241]: INFO nova.compute.manager [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Terminating instance [ 1535.561261] env[63241]: DEBUG nova.compute.manager [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1535.561465] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1535.562545] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e492c5e8-a32b-45f5-ab07-699392016fa1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.572207] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1535.572505] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e98bcbe6-aa8d-462f-8e36-4c3983fbc198 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.581179] env[63241]: DEBUG oslo_vmware.api [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1535.581179] env[63241]: value = "task-1820373" [ 1535.581179] env[63241]: _type = "Task" [ 1535.581179] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.594865] env[63241]: DEBUG oslo_vmware.api [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820373, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.790934] env[63241]: DEBUG oslo_concurrency.lockutils [req-d781690a-b843-4d1c-83e4-caf4ee308d73 req-7553fbb6-1a7b-4cc0-8402-1ee5de7ffefa service nova] Releasing lock "refresh_cache-c7b034f7-1d7f-4782-9ecb-5987c35339cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.012260] env[63241]: INFO nova.compute.manager [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Took 45.14 seconds to build instance. 
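The PowerOnVM_Task records above follow oslo.vmware's generic task pattern: the driver submits an asynchronous vCenter task through the API session, then blocks in wait_for_task(), whose internal _poll_task() loop emits the "Task: {...} progress is N%" records until the task reports success. A minimal sketch of that pattern, assuming illustrative connection settings and a VirtualMachine managed-object reference (vm_ref) obtained elsewhere:

    from oslo_vmware import api

    # Session against vCenter; the values below are placeholders, not the
    # credentials used in this run.
    session = api.VMwareAPISession(
        'vc1.example.org',   # host
        'svc-nova',          # server_username (illustrative)
        'secret',            # server_password (illustrative)
        10,                  # api_retry_count
        0.5)                 # task_poll_interval, in seconds

    # Submit the asynchronous task and block until it finishes.
    # wait_for_task() polls the task object at task_poll_interval, which is
    # what produces the "progress is 0%/66%/..." lines in this log.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)

The same pattern covers the SearchDatastore_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task polling seen in the surrounding records; only the method name and arguments passed to invoke_api() change.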
[ 1536.027519] env[63241]: DEBUG nova.scheduler.client.report [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1536.043318] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640759} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.043664] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c3c278a8-0513-4a7f-881e-b71c70206860/c3c278a8-0513-4a7f-881e-b71c70206860.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1536.043814] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1536.044079] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11ff34a1-da8e-47f5-90e9-eefb7074e8ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.051348] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1536.051348] env[63241]: value = "task-1820374" [ 1536.051348] env[63241]: _type = "Task" [ 1536.051348] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.060240] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820374, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.093569] env[63241]: DEBUG oslo_vmware.api [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820373, 'name': PowerOffVM_Task, 'duration_secs': 0.438549} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.093982] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1536.094200] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1536.094458] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-445af96f-333e-4715-9dda-dd0d66cc1821 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.194054] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1536.194192] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1536.194459] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleting the datastore file [datastore1] e753da08-d4a5-4f17-85c8-154e843798c9 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1536.194671] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-339133e6-34fa-4d04-9e34-56cbb425691f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.202075] env[63241]: DEBUG oslo_vmware.api [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1536.202075] env[63241]: value = "task-1820376" [ 1536.202075] env[63241]: _type = "Task" [ 1536.202075] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.211451] env[63241]: DEBUG oslo_vmware.api [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820376, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.513675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "9e6ca606-383d-42f0-aea4-edecde33c1a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.513959] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "9e6ca606-383d-42f0-aea4-edecde33c1a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.515392] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9842e337-0ff4-4039-92ed-a83bbf1648ab tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "a534b054-2143-41c4-a0fa-028339ecdbbf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.656s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.537091] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.154s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.540042] env[63241]: DEBUG oslo_concurrency.lockutils [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.584s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.540042] env[63241]: DEBUG nova.objects.instance [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lazy-loading 'resources' on Instance uuid 0c72c98b-57f0-44e5-9159-490b27eac3a6 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1536.563033] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162886} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.564370] env[63241]: INFO nova.scheduler.client.report [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Deleted allocations for instance 5060e745-08d0-429e-8780-bfdad7a29f30 [ 1536.566014] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1536.568213] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59574335-a95e-4577-a876-249cad188059 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.597313] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] c3c278a8-0513-4a7f-881e-b71c70206860/c3c278a8-0513-4a7f-881e-b71c70206860.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1536.597710] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59b8c24d-5d36-467e-8d89-98a57dd9663e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.620480] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1536.620480] env[63241]: value = "task-1820377" [ 1536.620480] env[63241]: _type = "Task" [ 1536.620480] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.631171] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820377, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.715134] env[63241]: DEBUG oslo_vmware.api [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.475912} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.715403] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1536.715601] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1536.715797] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1536.715985] env[63241]: INFO nova.compute.manager [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1536.716268] env[63241]: DEBUG oslo.service.loopingcall [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1536.716460] env[63241]: DEBUG nova.compute.manager [-] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1536.716557] env[63241]: DEBUG nova.network.neutron [-] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1537.016663] env[63241]: DEBUG nova.compute.manager [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1537.079833] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d441c0eb-8fff-42d1-83e6-0c3e947b22f7 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "5060e745-08d0-429e-8780-bfdad7a29f30" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.070s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.119224] env[63241]: DEBUG nova.compute.manager [req-31e4ad2b-f10f-4d32-b1ba-982173dbe600 req-309e7f91-8dff-444d-8e05-7ae058f4b74e service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Received event network-vif-deleted-5727d5d3-d1ae-4830-a899-52c5d7ea9414 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.119538] env[63241]: INFO nova.compute.manager [req-31e4ad2b-f10f-4d32-b1ba-982173dbe600 req-309e7f91-8dff-444d-8e05-7ae058f4b74e service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Neutron deleted interface 5727d5d3-d1ae-4830-a899-52c5d7ea9414; detaching it from the instance and deleting it from the info cache [ 1537.119649] env[63241]: DEBUG nova.network.neutron [req-31e4ad2b-f10f-4d32-b1ba-982173dbe600 req-309e7f91-8dff-444d-8e05-7ae058f4b74e service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.135424] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820377, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.481839] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.482197] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.482426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.482627] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.482974] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.487363] env[63241]: INFO nova.compute.manager [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Terminating instance [ 1537.489973] env[63241]: DEBUG nova.compute.manager [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1537.490137] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1537.492432] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f438795-11bb-47b8-893e-8b2973394339 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.495401] env[63241]: DEBUG nova.network.neutron [-] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.499794] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1537.502550] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2c90a11-ad7b-4db1-816c-1115e727d81f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.511388] env[63241]: DEBUG oslo_vmware.api [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1537.511388] env[63241]: value = "task-1820378" [ 1537.511388] env[63241]: _type = "Task" [ 1537.511388] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.517112] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5080bc15-1103-48ca-9c70-7161df16a073 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.526416] env[63241]: DEBUG oslo_vmware.api [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820378, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.535142] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ea28f0-96a3-4565-972a-f8e25adee796 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.581072] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.582300] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a48994c-b157-494e-a88b-d73a6d271f9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.593042] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e19f0b5-3a5f-4785-8c7f-84ffadcdd043 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.613385] env[63241]: DEBUG nova.compute.provider_tree [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1537.623027] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c999107c-033c-4d0f-9132-bbb5e2ed3aad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.638643] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820377, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.644158] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b5fb9d-53ed-43c0-9309-83d34f6f8d8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.680519] env[63241]: DEBUG nova.compute.manager [req-31e4ad2b-f10f-4d32-b1ba-982173dbe600 req-309e7f91-8dff-444d-8e05-7ae058f4b74e service nova] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Detach interface failed, port_id=5727d5d3-d1ae-4830-a899-52c5d7ea9414, reason: Instance e753da08-d4a5-4f17-85c8-154e843798c9 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1537.997161] env[63241]: INFO nova.compute.manager [-] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Took 1.28 seconds to deallocate network for instance. [ 1538.022224] env[63241]: DEBUG oslo_vmware.api [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820378, 'name': PowerOffVM_Task, 'duration_secs': 0.233834} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.022509] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1538.022687] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1538.022951] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18bfea35-dbfc-4127-9618-472f83e4511f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.116125] env[63241]: DEBUG nova.compute.manager [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1538.117143] env[63241]: DEBUG nova.scheduler.client.report [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1538.121621] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e2f52c-708e-436f-bf4b-57066fe501fc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.135118] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820377, 'name': ReconfigVM_Task, 'duration_secs': 1.095916} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.138130] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Reconfigured VM instance instance-00000034 to attach disk [datastore1] c3c278a8-0513-4a7f-881e-b71c70206860/c3c278a8-0513-4a7f-881e-b71c70206860.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1538.139031] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f777ce39-8e2c-418b-8ce1-493881d48f90 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.147743] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1538.147743] env[63241]: value = "task-1820380" [ 1538.147743] env[63241]: _type = "Task" [ 1538.147743] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.156543] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820380, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.503436] env[63241]: DEBUG oslo_concurrency.lockutils [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.594567] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1538.594841] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1538.595098] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Deleting the datastore file [datastore1] 0115b03b-c828-4e8b-a4d2-c98f8ca69c66 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1538.595423] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ef3a6d0-bf44-4026-af77-5b253615a206 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.603534] env[63241]: DEBUG oslo_vmware.api [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for the task: (returnval){ [ 1538.603534] env[63241]: value = "task-1820381" [ 1538.603534] env[63241]: _type = "Task" [ 1538.603534] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.613406] env[63241]: DEBUG oslo_vmware.api [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820381, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.626407] env[63241]: DEBUG oslo_concurrency.lockutils [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.087s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.628664] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.412s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.630252] env[63241]: INFO nova.compute.claims [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1538.640606] env[63241]: INFO nova.compute.manager [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] instance snapshotting [ 1538.643325] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2ea13e-aa84-4633-9185-19ecb4733232 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.671019] env[63241]: INFO nova.scheduler.client.report [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Deleted allocations for instance 0c72c98b-57f0-44e5-9159-490b27eac3a6 [ 1538.671019] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63bab38-2db3-470c-af20-357193863042 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.678887] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820380, 'name': Rename_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.116042] env[63241]: DEBUG oslo_vmware.api [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Task: {'id': task-1820381, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291142} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.116186] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1539.116344] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1539.116553] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1539.116746] env[63241]: INFO nova.compute.manager [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1539.117046] env[63241]: DEBUG oslo.service.loopingcall [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.117271] env[63241]: DEBUG nova.compute.manager [-] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1539.117380] env[63241]: DEBUG nova.network.neutron [-] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1539.158600] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820380, 'name': Rename_Task, 'duration_secs': 0.714597} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.158877] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1539.159126] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdfc1434-25df-404d-9154-c4b15096e21d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.167074] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1539.167074] env[63241]: value = "task-1820382" [ 1539.167074] env[63241]: _type = "Task" [ 1539.167074] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.175187] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820382, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.182065] env[63241]: DEBUG oslo_concurrency.lockutils [None req-342e08d3-b0a1-4959-a31f-5aa5ac09aed4 tempest-FloatingIPsAssociationTestJSON-679460102 tempest-FloatingIPsAssociationTestJSON-679460102-project-member] Lock "0c72c98b-57f0-44e5-9159-490b27eac3a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.421s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.189533] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1539.189793] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-43bb97e4-58fe-462e-b0d8-dbbdbcf07c0d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.198200] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1539.198200] env[63241]: value = "task-1820383" [ 1539.198200] env[63241]: _type = "Task" [ 1539.198200] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.208066] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820383, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.421430] env[63241]: DEBUG nova.compute.manager [req-ee4ac075-8eee-41e4-b82c-659d15b8acd0 req-b3360be8-0c40-44f1-bd2b-43d44daa4bf4 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Received event network-vif-deleted-79c31016-2c0a-49c7-bfd0-bbed6734219c {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1539.421430] env[63241]: INFO nova.compute.manager [req-ee4ac075-8eee-41e4-b82c-659d15b8acd0 req-b3360be8-0c40-44f1-bd2b-43d44daa4bf4 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Neutron deleted interface 79c31016-2c0a-49c7-bfd0-bbed6734219c; detaching it from the instance and deleting it from the info cache [ 1539.421520] env[63241]: DEBUG nova.network.neutron [req-ee4ac075-8eee-41e4-b82c-659d15b8acd0 req-b3360be8-0c40-44f1-bd2b-43d44daa4bf4 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.681824] env[63241]: DEBUG oslo_vmware.api [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820382, 'name': PowerOnVM_Task, 'duration_secs': 0.511152} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.682113] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1539.682245] env[63241]: INFO nova.compute.manager [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Took 10.10 seconds to spawn the instance on the hypervisor. [ 1539.682432] env[63241]: DEBUG nova.compute.manager [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1539.685951] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05a1f6d-f785-40c4-a06d-78c6502081d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.708783] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820383, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.897929] env[63241]: DEBUG nova.network.neutron [-] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.926874] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3828c17a-a03c-4604-9c85-3813db733b73 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.937282] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd36474d-2f50-4208-a6ac-6dab1fe4ecfa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.974014] env[63241]: DEBUG nova.compute.manager [req-ee4ac075-8eee-41e4-b82c-659d15b8acd0 req-b3360be8-0c40-44f1-bd2b-43d44daa4bf4 service nova] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Detach interface failed, port_id=79c31016-2c0a-49c7-bfd0-bbed6734219c, reason: Instance 0115b03b-c828-4e8b-a4d2-c98f8ca69c66 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1540.101821] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7990488e-385c-4c70-bca8-c1cc08e3582b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.112170] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edda87ed-e8c1-4a82-a057-ce888737f598 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.145815] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a303f0-e9e1-4260-a160-6fd1ae357feb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.153745] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e641c8d0-82cd-466d-885a-011617147fdb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.169159] env[63241]: DEBUG nova.compute.provider_tree [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1540.210743] env[63241]: INFO nova.compute.manager [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Took 48.72 seconds to build instance. [ 1540.215933] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820383, 'name': CreateSnapshot_Task, 'duration_secs': 0.958648} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.216362] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1540.217095] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111e0928-ee54-4d59-a2d3-a71e0bdc6713 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.400307] env[63241]: INFO nova.compute.manager [-] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Took 1.28 seconds to deallocate network for instance. [ 1540.672590] env[63241]: DEBUG nova.scheduler.client.report [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1540.717477] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5daaedc4-c7a8-427c-9822-6adbc207b2c7 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "c3c278a8-0513-4a7f-881e-b71c70206860" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.244s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.734895] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1540.735245] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1e48c0d4-3935-46f3-9781-e3d008fefeef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.745320] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1540.745320] env[63241]: value = "task-1820384" [ 1540.745320] env[63241]: _type = "Task" [ 1540.745320] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.755936] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820384, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.909210] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.178453] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.179069] env[63241]: DEBUG nova.compute.manager [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1541.181901] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.177s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.182172] env[63241]: DEBUG nova.objects.instance [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lazy-loading 'resources' on Instance uuid efbe39fa-d581-41ac-b51c-9c94c9839d7a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1541.258818] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820384, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.690416] env[63241]: DEBUG nova.compute.utils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1541.690766] env[63241]: DEBUG nova.compute.manager [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1541.690958] env[63241]: DEBUG nova.network.neutron [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1541.757123] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820384, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.783963] env[63241]: DEBUG nova.policy [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f691b52644488c832ce1224a079218', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e48fc59560ab47ae87be73ab11b13e7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1542.194456] env[63241]: DEBUG nova.compute.manager [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1542.235771] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8e63b5-6c41-41c5-a4fc-cb0cca2b0fd9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.245496] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9890f4b1-ffa4-4a78-b58e-14912d3837a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.286970] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820384, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.287912] env[63241]: DEBUG nova.network.neutron [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Successfully created port: fbb105cc-6670-49e6-8d68-5e7a6db44e8d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1542.290442] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac91d1b-6cf3-4cf4-9bfb-d4d8510f4f9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.299990] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c97d43-4bf9-4ac6-9e85-04ddb3336190 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.317823] env[63241]: DEBUG nova.compute.provider_tree [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1542.618637] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquiring lock "c3c278a8-0513-4a7f-881e-b71c70206860" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.618781] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "c3c278a8-0513-4a7f-881e-b71c70206860" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.619080] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquiring lock "c3c278a8-0513-4a7f-881e-b71c70206860-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.619189] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "c3c278a8-0513-4a7f-881e-b71c70206860-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.619287] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "c3c278a8-0513-4a7f-881e-b71c70206860-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.625011] env[63241]: INFO nova.compute.manager [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Terminating instance [ 1542.627449] env[63241]: DEBUG nova.compute.manager [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1542.627449] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1542.627928] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaef7382-ea71-4e44-9b0b-aeb3842b92c1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.636412] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1542.636681] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fb4a0a8-f1e3-4d99-941a-6efe954eadc0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.645846] env[63241]: DEBUG oslo_vmware.api [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1542.645846] env[63241]: value = "task-1820385" [ 1542.645846] env[63241]: _type = "Task" [ 1542.645846] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.654272] env[63241]: DEBUG oslo_vmware.api [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820385, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.762559] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820384, 'name': CloneVM_Task, 'duration_secs': 1.555431} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.762855] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Created linked-clone VM from snapshot [ 1542.763606] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43be6db-7d17-4855-bf82-f4c947a9eab4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.774092] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Uploading image 9ad68a25-776e-4c94-b8a5-166e8724b00c {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1542.802121] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1542.802121] env[63241]: value = "vm-377078" [ 1542.802121] env[63241]: _type = "VirtualMachine" [ 1542.802121] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1542.802121] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e4d8b135-8d33-4fec-aa55-f5337fbb4c1f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.814842] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lease: (returnval){ [ 1542.814842] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c0ca07-0a77-33d7-4489-ab92a8354c72" [ 1542.814842] env[63241]: _type = "HttpNfcLease" [ 1542.814842] env[63241]: } obtained for exporting VM: (result){ [ 1542.814842] env[63241]: value = "vm-377078" [ 1542.814842] env[63241]: _type = "VirtualMachine" [ 1542.814842] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1542.814842] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the lease: (returnval){ [ 1542.814842] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c0ca07-0a77-33d7-4489-ab92a8354c72" [ 1542.814842] env[63241]: _type = "HttpNfcLease" [ 1542.814842] env[63241]: } to be ready. 
{{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1542.825268] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1542.825268] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c0ca07-0a77-33d7-4489-ab92a8354c72" [ 1542.825268] env[63241]: _type = "HttpNfcLease" [ 1542.825268] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1542.842853] env[63241]: ERROR nova.scheduler.client.report [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [req-9a944b6c-b11c-40b0-8f7b-a1661e88f07a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9a944b6c-b11c-40b0-8f7b-a1661e88f07a"}]} [ 1542.862577] env[63241]: DEBUG nova.scheduler.client.report [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1542.886529] env[63241]: DEBUG nova.scheduler.client.report [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1542.886937] env[63241]: DEBUG nova.compute.provider_tree [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1542.907919] env[63241]: DEBUG nova.scheduler.client.report [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Refreshing aggregate associations for resource 
provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1542.950011] env[63241]: DEBUG nova.scheduler.client.report [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1543.157358] env[63241]: DEBUG oslo_vmware.api [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820385, 'name': PowerOffVM_Task, 'duration_secs': 0.281383} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.157626] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1543.157827] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1543.158568] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da112ebe-ca24-4efc-8974-ee4cab07f85a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.204738] env[63241]: DEBUG nova.compute.manager [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1543.230886] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1543.231141] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1543.231295] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1543.232029] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1543.232029] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1543.232029] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1543.232029] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1543.233190] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1543.233190] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 
tempest-ImagesTestJSON-2007080518-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1543.233316] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1543.233813] env[63241]: DEBUG nova.virt.hardware [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1543.235463] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc775308-ded8-4221-8912-bf6ae4b4a138 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.247999] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68000d5d-523a-4a34-acc3-32027e632091 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.308139] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1543.308139] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1543.308139] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Deleting the datastore file [datastore1] c3c278a8-0513-4a7f-881e-b71c70206860 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1543.308139] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88ced024-897e-428b-b680-94df30284cc5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.321030] env[63241]: DEBUG oslo_vmware.api [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for the task: (returnval){ [ 1543.321030] env[63241]: value = "task-1820388" [ 1543.321030] env[63241]: _type = "Task" [ 1543.321030] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.332743] env[63241]: DEBUG oslo_vmware.api [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820388, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.338612] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1543.338612] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c0ca07-0a77-33d7-4489-ab92a8354c72" [ 1543.338612] env[63241]: _type = "HttpNfcLease" [ 1543.338612] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1543.338612] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1543.338612] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c0ca07-0a77-33d7-4489-ab92a8354c72" [ 1543.338612] env[63241]: _type = "HttpNfcLease" [ 1543.338612] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1543.339226] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d00d9b-2fa0-46e9-af16-fa605cfd0016 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.349084] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2e954-17d7-efa8-cf68-9f0dcc0ea0c1/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1543.349322] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2e954-17d7-efa8-cf68-9f0dcc0ea0c1/disk-0.vmdk for reading. 
{{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1543.431871] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ae667a-d58a-4d14-851e-1bac3cad15fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.442254] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca59a6e2-7d95-4a25-860f-1d6f09e52e3e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.479852] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac33acd-414c-4b44-a068-91db6edd5edb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.482892] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0769320c-60d3-454f-b15c-b0abcaa4b46d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.491767] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8920a9-cce4-4b00-819e-3c75f325ae63 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.509322] env[63241]: DEBUG nova.compute.provider_tree [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1543.835487] env[63241]: DEBUG oslo_vmware.api [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Task: {'id': task-1820388, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159197} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.839492] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1543.839712] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1543.839955] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1543.840212] env[63241]: INFO nova.compute.manager [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1543.840469] env[63241]: DEBUG oslo.service.loopingcall [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1543.840672] env[63241]: DEBUG nova.compute.manager [-] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1543.840759] env[63241]: DEBUG nova.network.neutron [-] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1544.006058] env[63241]: DEBUG nova.compute.manager [req-943f5c10-f03b-4372-b0f5-fcc6b2177dfd req-1664c3ba-2337-4af1-8ea4-82b244fd3c28 service nova] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Received event network-vif-plugged-fbb105cc-6670-49e6-8d68-5e7a6db44e8d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1544.006280] env[63241]: DEBUG oslo_concurrency.lockutils [req-943f5c10-f03b-4372-b0f5-fcc6b2177dfd req-1664c3ba-2337-4af1-8ea4-82b244fd3c28 service nova] Acquiring lock "7158c64a-5036-419b-b110-7e22c12bf3dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.006556] env[63241]: DEBUG oslo_concurrency.lockutils [req-943f5c10-f03b-4372-b0f5-fcc6b2177dfd req-1664c3ba-2337-4af1-8ea4-82b244fd3c28 service nova] Lock "7158c64a-5036-419b-b110-7e22c12bf3dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.008693] env[63241]: DEBUG oslo_concurrency.lockutils [req-943f5c10-f03b-4372-b0f5-fcc6b2177dfd req-1664c3ba-2337-4af1-8ea4-82b244fd3c28 service nova] Lock "7158c64a-5036-419b-b110-7e22c12bf3dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.008693] env[63241]: DEBUG nova.compute.manager [req-943f5c10-f03b-4372-b0f5-fcc6b2177dfd req-1664c3ba-2337-4af1-8ea4-82b244fd3c28 service nova] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] No waiting events found dispatching network-vif-plugged-fbb105cc-6670-49e6-8d68-5e7a6db44e8d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1544.008693] env[63241]: WARNING nova.compute.manager [req-943f5c10-f03b-4372-b0f5-fcc6b2177dfd req-1664c3ba-2337-4af1-8ea4-82b244fd3c28 service nova] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Received unexpected event network-vif-plugged-fbb105cc-6670-49e6-8d68-5e7a6db44e8d for instance with vm_state building and task_state spawning. 
[ 1544.058390] env[63241]: DEBUG nova.scheduler.client.report [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1544.058842] env[63241]: DEBUG nova.compute.provider_tree [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 75 to 76 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1544.059186] env[63241]: DEBUG nova.compute.provider_tree [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1544.090911] env[63241]: DEBUG nova.network.neutron [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Successfully updated port: fbb105cc-6670-49e6-8d68-5e7a6db44e8d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1544.198666] env[63241]: DEBUG nova.compute.manager [req-18a023e9-e0b1-4d78-99ad-c6846744574e req-a7326e6c-df81-4f23-992e-41105e1b4ceb service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Received event network-vif-deleted-4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1544.199056] env[63241]: INFO nova.compute.manager [req-18a023e9-e0b1-4d78-99ad-c6846744574e req-a7326e6c-df81-4f23-992e-41105e1b4ceb service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Neutron deleted interface 4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1; detaching it from the instance and deleting it from the info cache [ 1544.199243] env[63241]: DEBUG nova.network.neutron [req-18a023e9-e0b1-4d78-99ad-c6846744574e req-a7326e6c-df81-4f23-992e-41105e1b4ceb service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.567785] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.386s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.572056] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.879s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.573283] env[63241]: INFO nova.compute.claims [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1544.588258] env[63241]: INFO nova.scheduler.client.report [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted allocations for instance efbe39fa-d581-41ac-b51c-9c94c9839d7a [ 1544.593671] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "refresh_cache-7158c64a-5036-419b-b110-7e22c12bf3dd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.593671] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "refresh_cache-7158c64a-5036-419b-b110-7e22c12bf3dd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.593860] env[63241]: DEBUG nova.network.neutron [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1544.640043] env[63241]: DEBUG nova.network.neutron [-] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.703034] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4b166f4-ea4d-41cd-a5e3-390287bd2d56 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.716957] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f357f6af-a0d6-4aec-b5ef-b2a6c67566eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.753810] env[63241]: DEBUG nova.compute.manager [req-18a023e9-e0b1-4d78-99ad-c6846744574e req-a7326e6c-df81-4f23-992e-41105e1b4ceb service nova] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Detach interface failed, port_id=4dabbba4-ee0b-48a8-bcf0-2a2e0f6da6d1, reason: Instance c3c278a8-0513-4a7f-881e-b71c70206860 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1544.799020] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquiring lock "ef36a081-6273-4397-b48f-c2bd03d0a865" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.799020] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "ef36a081-6273-4397-b48f-c2bd03d0a865" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.101373] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d3661eef-2096-4b49-92da-d547d056593b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "efbe39fa-d581-41ac-b51c-9c94c9839d7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.782s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.143201] env[63241]: DEBUG nova.network.neutron [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1545.145227] env[63241]: INFO nova.compute.manager [-] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Took 1.30 seconds to deallocate network for instance. 
[ 1545.302417] env[63241]: DEBUG nova.network.neutron [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Updating instance_info_cache with network_info: [{"id": "fbb105cc-6670-49e6-8d68-5e7a6db44e8d", "address": "fa:16:3e:57:cf:ad", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbb105cc-66", "ovs_interfaceid": "fbb105cc-6670-49e6-8d68-5e7a6db44e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.304456] env[63241]: DEBUG nova.compute.manager [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1545.651460] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.811269] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "refresh_cache-7158c64a-5036-419b-b110-7e22c12bf3dd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.811269] env[63241]: DEBUG nova.compute.manager [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Instance network_info: |[{"id": "fbb105cc-6670-49e6-8d68-5e7a6db44e8d", "address": "fa:16:3e:57:cf:ad", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbb105cc-66", "ovs_interfaceid": "fbb105cc-6670-49e6-8d68-5e7a6db44e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1545.812287] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:cf:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbb105cc-6670-49e6-8d68-5e7a6db44e8d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1545.820166] env[63241]: DEBUG oslo.service.loopingcall [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1545.822549] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1545.823385] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f6c7328-4852-4bf0-8212-56b8d395ccb7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.839163] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.848800] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1545.848800] env[63241]: value = "task-1820389" [ 1545.848800] env[63241]: _type = "Task" [ 1545.848800] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.861048] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820389, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.023310] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19bb0bd9-19e9-4fd8-ad7f-f76d00389bf0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.031693] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e059094-8830-4ae1-b5a6-a587de96807e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.066288] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9fb34f9-b6af-4d6f-abe5-011cfe4e4542 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.070173] env[63241]: DEBUG nova.compute.manager [req-a3c128f2-bc44-42cb-af6e-3ff1cc782072 req-e4e6d996-52bf-4719-bb87-40efeae970a2 service nova] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Received event network-changed-fbb105cc-6670-49e6-8d68-5e7a6db44e8d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.070365] env[63241]: DEBUG nova.compute.manager [req-a3c128f2-bc44-42cb-af6e-3ff1cc782072 req-e4e6d996-52bf-4719-bb87-40efeae970a2 service nova] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Refreshing instance network info cache due to event network-changed-fbb105cc-6670-49e6-8d68-5e7a6db44e8d. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1546.070584] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3c128f2-bc44-42cb-af6e-3ff1cc782072 req-e4e6d996-52bf-4719-bb87-40efeae970a2 service nova] Acquiring lock "refresh_cache-7158c64a-5036-419b-b110-7e22c12bf3dd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.070725] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3c128f2-bc44-42cb-af6e-3ff1cc782072 req-e4e6d996-52bf-4719-bb87-40efeae970a2 service nova] Acquired lock "refresh_cache-7158c64a-5036-419b-b110-7e22c12bf3dd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.070883] env[63241]: DEBUG nova.network.neutron [req-a3c128f2-bc44-42cb-af6e-3ff1cc782072 req-e4e6d996-52bf-4719-bb87-40efeae970a2 service nova] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Refreshing network info cache for port fbb105cc-6670-49e6-8d68-5e7a6db44e8d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1546.078502] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ff64e4-a8a7-4c77-8886-62f8a3fb63bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.095107] env[63241]: DEBUG nova.compute.provider_tree [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1546.360267] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820389, 'name': CreateVM_Task, 'duration_secs': 0.448705} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.360560] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1546.361333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.361530] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.361922] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1546.362254] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d56cb3dd-b755-412e-922a-5212a74fec8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.368466] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1546.368466] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5215ea22-9a79-6f24-b22f-414f9363de77" [ 1546.368466] env[63241]: _type = "Task" [ 1546.368466] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.377814] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5215ea22-9a79-6f24-b22f-414f9363de77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.598068] env[63241]: DEBUG nova.scheduler.client.report [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1546.786078] env[63241]: DEBUG nova.network.neutron [req-a3c128f2-bc44-42cb-af6e-3ff1cc782072 req-e4e6d996-52bf-4719-bb87-40efeae970a2 service nova] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Updated VIF entry in instance network info cache for port fbb105cc-6670-49e6-8d68-5e7a6db44e8d. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1546.786469] env[63241]: DEBUG nova.network.neutron [req-a3c128f2-bc44-42cb-af6e-3ff1cc782072 req-e4e6d996-52bf-4719-bb87-40efeae970a2 service nova] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Updating instance_info_cache with network_info: [{"id": "fbb105cc-6670-49e6-8d68-5e7a6db44e8d", "address": "fa:16:3e:57:cf:ad", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbb105cc-66", "ovs_interfaceid": "fbb105cc-6670-49e6-8d68-5e7a6db44e8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.880246] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5215ea22-9a79-6f24-b22f-414f9363de77, 'name': SearchDatastore_Task, 'duration_secs': 0.016597} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.880582] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.880915] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1546.881186] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.881342] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.881527] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1546.881795] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fb6799d-be85-4fea-9b30-3ae4e3d30a71 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.892196] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1546.892400] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1546.893792] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d4bf88a-9789-4c30-a423-163c3ecd4985 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.901888] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1546.901888] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524168db-438f-8e25-8ac9-4a50ec5ea5c0" [ 1546.901888] env[63241]: _type = "Task" [ 1546.901888] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.910341] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524168db-438f-8e25-8ac9-4a50ec5ea5c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.104645] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.533s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.105230] env[63241]: DEBUG nova.compute.manager [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1547.109179] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.603s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.109179] env[63241]: DEBUG nova.objects.instance [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lazy-loading 'resources' on Instance uuid c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1547.289486] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3c128f2-bc44-42cb-af6e-3ff1cc782072 req-e4e6d996-52bf-4719-bb87-40efeae970a2 service nova] Releasing lock "refresh_cache-7158c64a-5036-419b-b110-7e22c12bf3dd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.415916] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524168db-438f-8e25-8ac9-4a50ec5ea5c0, 'name': SearchDatastore_Task, 'duration_secs': 0.010889} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.416786] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dec79a89-5786-4587-b60d-2a5f7bf9c5ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.423184] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1547.423184] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c07cca-6730-eee3-b7af-f76d3fb60595" [ 1547.423184] env[63241]: _type = "Task" [ 1547.423184] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.432839] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c07cca-6730-eee3-b7af-f76d3fb60595, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.614999] env[63241]: DEBUG nova.compute.utils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1547.616707] env[63241]: DEBUG nova.compute.manager [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1547.616916] env[63241]: DEBUG nova.network.neutron [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1547.656541] env[63241]: DEBUG nova.policy [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22e64075ae2a435fa4e372cdb9cd0ad2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f89b38fc89ac4f039a89fb9bf42dbc5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1547.936753] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c07cca-6730-eee3-b7af-f76d3fb60595, 'name': SearchDatastore_Task, 'duration_secs': 0.011783} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.937818] env[63241]: DEBUG nova.network.neutron [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Successfully created port: 98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1547.939571] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.941024] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 7158c64a-5036-419b-b110-7e22c12bf3dd/7158c64a-5036-419b-b110-7e22c12bf3dd.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1547.941024] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-034e16e2-2a3b-4922-906c-3019515d89ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.949892] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1547.949892] env[63241]: value = "task-1820390" [ 
1547.949892] env[63241]: _type = "Task" [ 1547.949892] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.958765] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820390, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.043471] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a9702b-4b5e-4066-835b-0c3693b863ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.050172] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990ac295-1152-4e32-8729-a708cfac633e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.086583] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4b5118-b7f4-4c38-b6eb-11dbc56a82ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.096196] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5586077-e00a-4ad7-9efe-52b40c6571a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.115053] env[63241]: DEBUG nova.compute.provider_tree [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1548.120397] env[63241]: DEBUG nova.compute.manager [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1548.461446] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820390, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.617015] env[63241]: DEBUG nova.scheduler.client.report [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1548.961498] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572083} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.961833] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 7158c64a-5036-419b-b110-7e22c12bf3dd/7158c64a-5036-419b-b110-7e22c12bf3dd.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1548.962062] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1548.962375] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98e5e1a1-4427-4cad-85f7-2da7a12f6215 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.970518] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1548.970518] env[63241]: value = "task-1820391" [ 1548.970518] env[63241]: _type = "Task" [ 1548.970518] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.979844] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820391, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.123213] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.015s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.125857] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.849s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.127384] env[63241]: INFO nova.compute.claims [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1549.132917] env[63241]: DEBUG nova.compute.manager [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1549.156135] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1549.156135] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1549.156135] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1549.156135] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 
tempest-AttachInterfacesUnderV243Test-51790697-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1549.156687] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1549.156687] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1549.157387] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1549.157635] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1549.159135] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1549.159135] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1549.159135] env[63241]: DEBUG nova.virt.hardware [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1549.159379] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1979892d-bd32-40c8-b7e8-993f1341db37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.163440] env[63241]: INFO nova.scheduler.client.report [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Deleted allocations for instance c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3 [ 1549.171269] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5a19bc-6f59-44df-87db-73049910d5d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.451778] env[63241]: DEBUG 
nova.compute.manager [req-b8773d67-a29e-422a-bd34-9d7ea7f70927 req-8e677018-987a-4007-b80b-10a9f6ca50fb service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Received event network-vif-plugged-98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1549.452026] env[63241]: DEBUG oslo_concurrency.lockutils [req-b8773d67-a29e-422a-bd34-9d7ea7f70927 req-8e677018-987a-4007-b80b-10a9f6ca50fb service nova] Acquiring lock "e3842404-2c80-4fa9-b0c9-c58c484845a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.452338] env[63241]: DEBUG oslo_concurrency.lockutils [req-b8773d67-a29e-422a-bd34-9d7ea7f70927 req-8e677018-987a-4007-b80b-10a9f6ca50fb service nova] Lock "e3842404-2c80-4fa9-b0c9-c58c484845a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.452576] env[63241]: DEBUG oslo_concurrency.lockutils [req-b8773d67-a29e-422a-bd34-9d7ea7f70927 req-8e677018-987a-4007-b80b-10a9f6ca50fb service nova] Lock "e3842404-2c80-4fa9-b0c9-c58c484845a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.452817] env[63241]: DEBUG nova.compute.manager [req-b8773d67-a29e-422a-bd34-9d7ea7f70927 req-8e677018-987a-4007-b80b-10a9f6ca50fb service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] No waiting events found dispatching network-vif-plugged-98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1549.452965] env[63241]: WARNING nova.compute.manager [req-b8773d67-a29e-422a-bd34-9d7ea7f70927 req-8e677018-987a-4007-b80b-10a9f6ca50fb service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Received unexpected event network-vif-plugged-98619b24-0318-422e-90bb-ed8db3309905 for instance with vm_state building and task_state spawning. [ 1549.481197] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820391, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118759} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.481573] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1549.482342] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e8ab9f-de2d-44db-8d6c-faea93a7b548 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.506890] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 7158c64a-5036-419b-b110-7e22c12bf3dd/7158c64a-5036-419b-b110-7e22c12bf3dd.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1549.507931] env[63241]: DEBUG nova.network.neutron [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Successfully updated port: 98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1549.509039] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e56529fb-41e7-4b0b-a747-3d6431501e3c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.530744] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1549.530744] env[63241]: value = "task-1820392" [ 1549.530744] env[63241]: _type = "Task" [ 1549.530744] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.539473] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820392, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.674084] env[63241]: DEBUG oslo_concurrency.lockutils [None req-be86edd0-50b6-41c2-811f-8e0283a80d7a tempest-ServersTestBootFromVolume-1074118623 tempest-ServersTestBootFromVolume-1074118623-project-member] Lock "c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.022s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.024087] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.024275] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquired lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.024438] env[63241]: DEBUG nova.network.neutron [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1550.041794] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820392, 'name': ReconfigVM_Task, 'duration_secs': 0.440998} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.042080] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 7158c64a-5036-419b-b110-7e22c12bf3dd/7158c64a-5036-419b-b110-7e22c12bf3dd.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1550.042731] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55e7683e-edb7-4b36-9c32-2b77d784b41e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.048526] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1550.048526] env[63241]: value = "task-1820393" [ 1550.048526] env[63241]: _type = "Task" [ 1550.048526] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.057327] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820393, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.559471] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820393, 'name': Rename_Task, 'duration_secs': 0.215554} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.562749] env[63241]: DEBUG nova.network.neutron [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1550.565036] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1550.565036] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fbfd0c5-9409-4b8c-8041-1ecfb2d27f56 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.571813] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1550.571813] env[63241]: value = "task-1820394" [ 1550.571813] env[63241]: _type = "Task" [ 1550.571813] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.579765] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820394, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.615170] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2e954-17d7-efa8-cf68-9f0dcc0ea0c1/disk-0.vmdk. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1550.615170] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61db8366-bfbd-48a3-a582-71c10d92117a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.623050] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2e954-17d7-efa8-cf68-9f0dcc0ea0c1/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1550.623402] env[63241]: ERROR oslo_vmware.rw_handles [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2e954-17d7-efa8-cf68-9f0dcc0ea0c1/disk-0.vmdk due to incomplete transfer. [ 1550.623737] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e455e65d-a5b7-4924-9415-a11c5efad8db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.630833] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2e954-17d7-efa8-cf68-9f0dcc0ea0c1/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1550.631525] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Uploaded image 9ad68a25-776e-4c94-b8a5-166e8724b00c to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1550.633826] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1550.634211] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6a4ed4a6-c161-4fc1-8496-e05f10fd90ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.640589] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1550.640589] env[63241]: value = "task-1820395" [ 1550.640589] env[63241]: _type = "Task" [ 1550.640589] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.649905] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820395, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.714466] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b84e17-7048-41df-9e2d-9a88a6cb5d0f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.723975] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb5e1c6-05a3-44a9-bc6f-015865a909cc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.763263] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6480329-f50c-48bc-a9d0-cbc7fa16d0fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.772574] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc0ff81-af17-4812-9443-29f3ee18eb98 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.786824] env[63241]: DEBUG nova.compute.provider_tree [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1550.815840] env[63241]: DEBUG nova.network.neutron [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updating instance_info_cache with network_info: [{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.082271] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820394, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.152637] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820395, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.309211] env[63241]: ERROR nova.scheduler.client.report [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [req-16bca529-972d-4c64-aab3-9221455576e7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-16bca529-972d-4c64-aab3-9221455576e7"}]} [ 1551.317877] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Releasing lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.318194] env[63241]: DEBUG nova.compute.manager [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Instance network_info: |[{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": 
"nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1551.318605] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:bb:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98619b24-0318-422e-90bb-ed8db3309905', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1551.326673] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Creating folder: Project (f89b38fc89ac4f039a89fb9bf42dbc5d). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1551.326963] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac8864a1-8533-4d4e-be7c-8af9d52ea080 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.335170] env[63241]: DEBUG nova.scheduler.client.report [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1551.339600] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Created folder: Project (f89b38fc89ac4f039a89fb9bf42dbc5d) in parent group-v376927. [ 1551.339600] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Creating folder: Instances. Parent ref: group-v377080. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1551.339766] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72cd42dc-2f8a-43f4-9054-c4ff4af7b329 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.348026] env[63241]: DEBUG nova.scheduler.client.report [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1551.348266] env[63241]: DEBUG nova.compute.provider_tree [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1551.351864] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Created folder: Instances in parent group-v377080. [ 1551.352211] env[63241]: DEBUG oslo.service.loopingcall [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1551.352645] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1551.352728] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e4d79fe-4865-4e1e-9914-a8303be876b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.368615] env[63241]: DEBUG nova.scheduler.client.report [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1551.375843] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1551.375843] env[63241]: value = "task-1820398" [ 1551.375843] env[63241]: _type = "Task" [ 1551.375843] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.384793] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820398, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.393860] env[63241]: DEBUG nova.scheduler.client.report [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1551.549324] env[63241]: DEBUG nova.compute.manager [req-793f5375-885f-49ae-a356-5fed4e2441c2 req-1db1ff89-0bfa-4adc-8dc1-c76f5610d3b2 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Received event network-changed-98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1551.549477] env[63241]: DEBUG nova.compute.manager [req-793f5375-885f-49ae-a356-5fed4e2441c2 req-1db1ff89-0bfa-4adc-8dc1-c76f5610d3b2 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Refreshing instance network info cache due to event network-changed-98619b24-0318-422e-90bb-ed8db3309905. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1551.549720] env[63241]: DEBUG oslo_concurrency.lockutils [req-793f5375-885f-49ae-a356-5fed4e2441c2 req-1db1ff89-0bfa-4adc-8dc1-c76f5610d3b2 service nova] Acquiring lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.549817] env[63241]: DEBUG oslo_concurrency.lockutils [req-793f5375-885f-49ae-a356-5fed4e2441c2 req-1db1ff89-0bfa-4adc-8dc1-c76f5610d3b2 service nova] Acquired lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.550147] env[63241]: DEBUG nova.network.neutron [req-793f5375-885f-49ae-a356-5fed4e2441c2 req-1db1ff89-0bfa-4adc-8dc1-c76f5610d3b2 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Refreshing network info cache for port 98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1551.587715] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820394, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.657868] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820395, 'name': Destroy_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.823828] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527fd71b-9892-48bc-9460-ab4d5f5361d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.831754] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08debaf-4028-4385-b258-7934230544d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.863449] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331dc97a-93c8-4e36-9efb-632706122aa7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.871330] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a896bcc9-6ef3-4049-8c93-df8052adc953 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.888161] env[63241]: DEBUG nova.compute.provider_tree [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1551.895013] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820398, 'name': CreateVM_Task, 'duration_secs': 0.390934} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.895180] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1551.895940] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.896108] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.896419] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1551.896657] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afc94999-e4da-45c3-8933-ef1b2c02bd3c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.902222] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1551.902222] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5227a736-2e5a-ad71-b712-df7d4b099f59" [ 1551.902222] env[63241]: _type = "Task" [ 1551.902222] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.910816] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5227a736-2e5a-ad71-b712-df7d4b099f59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.084548] env[63241]: DEBUG oslo_vmware.api [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820394, 'name': PowerOnVM_Task, 'duration_secs': 1.193702} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.084719] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1552.084922] env[63241]: INFO nova.compute.manager [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Took 8.88 seconds to spawn the instance on the hypervisor. [ 1552.085116] env[63241]: DEBUG nova.compute.manager [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1552.086163] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1813f42-2312-4ed7-9ff2-81c8662a1908 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.154831] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820395, 'name': Destroy_Task, 'duration_secs': 1.116479} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.155191] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Destroyed the VM [ 1552.155450] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1552.155724] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-129b518a-cb1f-430b-b7b3-a2d118af35ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.163580] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1552.163580] env[63241]: value = "task-1820399" [ 1552.163580] env[63241]: _type = "Task" [ 1552.163580] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.173136] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820399, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.399612] env[63241]: DEBUG nova.network.neutron [req-793f5375-885f-49ae-a356-5fed4e2441c2 req-1db1ff89-0bfa-4adc-8dc1-c76f5610d3b2 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updated VIF entry in instance network info cache for port 98619b24-0318-422e-90bb-ed8db3309905. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1552.400208] env[63241]: DEBUG nova.network.neutron [req-793f5375-885f-49ae-a356-5fed4e2441c2 req-1db1ff89-0bfa-4adc-8dc1-c76f5610d3b2 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updating instance_info_cache with network_info: [{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.415036] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5227a736-2e5a-ad71-b712-df7d4b099f59, 'name': SearchDatastore_Task, 'duration_secs': 0.009655} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.415036] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.415212] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1552.415351] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.415506] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.415688] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1552.416144] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e60449a-cc08-4988-ab1d-5d488e28d883 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.426023] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1552.426274] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1552.427561] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4eb46c2-6cbf-4dfe-816d-f2265484b4e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.432744] env[63241]: DEBUG nova.scheduler.client.report [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1552.432744] env[63241]: DEBUG nova.compute.provider_tree [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 77 to 78 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1552.432978] env[63241]: DEBUG nova.compute.provider_tree [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1552.438220] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1552.438220] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b5d06a-0425-d682-487b-047f342f955c" [ 1552.438220] env[63241]: _type = "Task" [ 1552.438220] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.448234] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b5d06a-0425-d682-487b-047f342f955c, 'name': SearchDatastore_Task, 'duration_secs': 0.010964} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.449808] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad548bf7-b040-4ca8-b7bc-c80a51663456 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.455700] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1552.455700] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ab91d0-8d5a-7887-3f25-dc659a4984be" [ 1552.455700] env[63241]: _type = "Task" [ 1552.455700] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.467148] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ab91d0-8d5a-7887-3f25-dc659a4984be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.607460] env[63241]: INFO nova.compute.manager [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Took 51.41 seconds to build instance. [ 1552.679434] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820399, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.903090] env[63241]: DEBUG oslo_concurrency.lockutils [req-793f5375-885f-49ae-a356-5fed4e2441c2 req-1db1ff89-0bfa-4adc-8dc1-c76f5610d3b2 service nova] Releasing lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.940091] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.814s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.940272] env[63241]: DEBUG nova.compute.manager [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1552.942922] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.317s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.943156] env[63241]: DEBUG nova.objects.instance [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'resources' on Instance uuid 27177719-5090-43de-9bca-6db6bebab7b4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1552.966998] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ab91d0-8d5a-7887-3f25-dc659a4984be, 'name': SearchDatastore_Task, 'duration_secs': 0.009284} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.967206] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.967454] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e3842404-2c80-4fa9-b0c9-c58c484845a2/e3842404-2c80-4fa9-b0c9-c58c484845a2.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1552.967761] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4a78d14-5a51-4685-a207-f4d955dcb8b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.975121] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1552.975121] env[63241]: value = "task-1820400" [ 1552.975121] env[63241]: _type = "Task" [ 1552.975121] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.983778] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820400, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.110353] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ce4f48d8-4e15-4ff4-bc0a-db80f4694e8a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "7158c64a-5036-419b-b110-7e22c12bf3dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.920s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.177031] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820399, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.446658] env[63241]: DEBUG nova.compute.utils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1553.453145] env[63241]: DEBUG nova.compute.manager [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1553.453145] env[63241]: DEBUG nova.network.neutron [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1553.489995] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820400, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.677058] env[63241]: DEBUG oslo_vmware.api [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820399, 'name': RemoveSnapshot_Task, 'duration_secs': 1.135818} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.679967] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1553.680430] env[63241]: INFO nova.compute.manager [None req-c524ea3c-191a-436a-bf16-08693edddb60 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Took 15.04 seconds to snapshot the instance on the hypervisor. 
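The 409 at [ 1551.309211] and the recovery that follows (inventory refresh at [ 1551.335170]/[ 1551.348026], then a clean update that bumps the provider generation from 77 to 78 at [ 1552.432744]) are Placement's optimistic concurrency control at work: every inventory PUT carries the caller's cached resource_provider_generation, and a stale generation is rejected with placement.concurrent_update so the caller must re-read the provider and retry. A minimal sketch of that retry loop against the Placement HTTP API follows; PLACEMENT_URL, TOKEN and the retry bookkeeping are illustrative placeholders, not values or code taken from this log.

    # Sketch of the generation-conflict retry seen above (assumed endpoint/token).
    import requests

    PLACEMENT_URL = "http://placement.example:8778"          # placeholder
    TOKEN = "gAAAA..."                                        # placeholder keystone token
    RP_UUID = "9a5e30eb-ceae-4224-aa66-dcbfa98ce24b"
    HEADERS = {"X-Auth-Token": TOKEN,
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(inventories, max_retries=3):
        """PUT the inventory, refreshing the provider generation on 409."""
        url = f"{PLACEMENT_URL}/resource_providers/{RP_UUID}/inventories"
        # Start from whatever generation the server currently reports.
        gen = requests.get(url, headers=HEADERS).json()["resource_provider_generation"]
        for _ in range(max_retries):
            body = {"resource_provider_generation": gen, "inventories": inventories}
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code == 200:
                # Success: the response carries the new generation (77 -> 78 above).
                return resp.json()["resource_provider_generation"]
            if resp.status_code == 409:
                # Another writer got there first; re-read the generation and retry,
                # which is what the report client's "Refreshing inventories" step does.
                gen = requests.get(url, headers=HEADERS).json()["resource_provider_generation"]
                continue
            resp.raise_for_status()
        raise RuntimeError(f"inventory update still conflicting after {max_retries} retries")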
[ 1553.706868] env[63241]: DEBUG nova.policy [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74bb38a9180e49efa8e14396d5d04d8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b874b5f558e48e9a83b27e69d262106', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1553.889437] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020fac8b-fc73-4012-8b64-3ce8309c4a0b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.898254] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11289ad7-b2bc-4c43-9c37-a83f9187e8d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.929094] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8443a085-97b7-4fad-bc44-b6e31d41b850 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.936683] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a4c92d-bcb5-4eb5-bac7-57da90a5fb68 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.956043] env[63241]: DEBUG nova.compute.manager [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1553.959527] env[63241]: DEBUG nova.compute.provider_tree [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1553.986431] env[63241]: DEBUG nova.network.neutron [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Successfully created port: 8ea61e21-b365-48ae-a2a2-7197b4b7151d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1553.993347] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820400, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523663} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.993604] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e3842404-2c80-4fa9-b0c9-c58c484845a2/e3842404-2c80-4fa9-b0c9-c58c484845a2.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1553.993811] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1553.994073] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c61c4676-4347-4b3f-a1d9-6da3ab0ad626 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.001510] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1554.001510] env[63241]: value = "task-1820401" [ 1554.001510] env[63241]: _type = "Task" [ 1554.001510] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.010929] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820401, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.084560] env[63241]: DEBUG nova.compute.manager [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1554.087020] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629cce8d-7cf3-4a43-9225-4622dc5dd8e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.272336] env[63241]: DEBUG nova.network.neutron [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Successfully created port: 9e415974-1e27-45d2-acb0-cdef98386304 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1554.463529] env[63241]: DEBUG nova.scheduler.client.report [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1554.517196] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072693} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.517666] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1554.518702] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0525b6d4-06fb-4b46-9ce8-df7a086578e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.543983] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] e3842404-2c80-4fa9-b0c9-c58c484845a2/e3842404-2c80-4fa9-b0c9-c58c484845a2.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1554.544593] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9444a03-2e51-4045-b46c-4587d2b95167 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.570187] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1554.570187] env[63241]: value = "task-1820402" [ 1554.570187] env[63241]: _type = "Task" [ 1554.570187] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.577939] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820402, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.597604] env[63241]: INFO nova.compute.manager [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] instance snapshotting [ 1554.600500] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29cc212-4935-434c-b453-33a7f87be9a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.621567] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba349f86-c8f0-4161-8eb9-da31e7ebfbce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.973022] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.028s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.975088] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 49.806s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.975415] env[63241]: DEBUG nova.objects.instance [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1554.979390] env[63241]: DEBUG nova.compute.manager [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1555.006275] env[63241]: INFO nova.scheduler.client.report [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleted allocations for instance 27177719-5090-43de-9bca-6db6bebab7b4 [ 1555.011260] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1555.011372] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1555.012024] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1555.012024] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1555.012024] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1555.012024] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1555.014351] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1555.014527] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc 
tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1555.014705] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1555.014878] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1555.015051] env[63241]: DEBUG nova.virt.hardware [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1555.015934] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af8bc26-f22e-4a50-952a-c2d83687fd4f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.029858] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1348d0-9a88-4dd4-8071-03f8d5c3c612 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.081220] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820402, 'name': ReconfigVM_Task, 'duration_secs': 0.271121} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.081521] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Reconfigured VM instance instance-00000036 to attach disk [datastore1] e3842404-2c80-4fa9-b0c9-c58c484845a2/e3842404-2c80-4fa9-b0c9-c58c484845a2.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1555.082149] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be40b1be-4e36-42f9-97ab-05dc19327416 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.088926] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1555.088926] env[63241]: value = "task-1820403" [ 1555.088926] env[63241]: _type = "Task" [ 1555.088926] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.098786] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820403, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.132774] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1555.133131] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d12f5adc-aac4-4858-89c6-dba407aaa1ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.140743] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1555.140743] env[63241]: value = "task-1820404" [ 1555.140743] env[63241]: _type = "Task" [ 1555.140743] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.151580] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820404, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.530335] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3ee9c14-5224-4b88-a266-65112673e7ac tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "27177719-5090-43de-9bca-6db6bebab7b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.180s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.604186] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820403, 'name': Rename_Task, 'duration_secs': 0.143235} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.604186] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1555.604186] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2e77dc9-6ba3-404a-8b00-3c4a06142146 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.616021] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1555.616021] env[63241]: value = "task-1820405" [ 1555.616021] env[63241]: _type = "Task" [ 1555.616021] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.624893] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820405, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.651728] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820404, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.925568] env[63241]: DEBUG nova.compute.manager [req-0b27ff90-2422-4a24-b66e-6b9869ddfbf5 req-45a8a9de-6b8d-491f-9ce2-cc1fa2b58453 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Received event network-vif-plugged-8ea61e21-b365-48ae-a2a2-7197b4b7151d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1555.925793] env[63241]: DEBUG oslo_concurrency.lockutils [req-0b27ff90-2422-4a24-b66e-6b9869ddfbf5 req-45a8a9de-6b8d-491f-9ce2-cc1fa2b58453 service nova] Acquiring lock "943100f1-e702-4869-8c19-d81d39712ac5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.927155] env[63241]: DEBUG oslo_concurrency.lockutils [req-0b27ff90-2422-4a24-b66e-6b9869ddfbf5 req-45a8a9de-6b8d-491f-9ce2-cc1fa2b58453 service nova] Lock "943100f1-e702-4869-8c19-d81d39712ac5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.927155] env[63241]: DEBUG oslo_concurrency.lockutils [req-0b27ff90-2422-4a24-b66e-6b9869ddfbf5 req-45a8a9de-6b8d-491f-9ce2-cc1fa2b58453 service nova] Lock "943100f1-e702-4869-8c19-d81d39712ac5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.927306] env[63241]: DEBUG nova.compute.manager [req-0b27ff90-2422-4a24-b66e-6b9869ddfbf5 req-45a8a9de-6b8d-491f-9ce2-cc1fa2b58453 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] No waiting events found dispatching network-vif-plugged-8ea61e21-b365-48ae-a2a2-7197b4b7151d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1555.927501] env[63241]: WARNING nova.compute.manager [req-0b27ff90-2422-4a24-b66e-6b9869ddfbf5 req-45a8a9de-6b8d-491f-9ce2-cc1fa2b58453 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Received unexpected event network-vif-plugged-8ea61e21-b365-48ae-a2a2-7197b4b7151d for instance with vm_state building and task_state spawning. 
[ 1555.990905] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e3c50882-1504-47f1-8504-808c0e6b22bb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.993086] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 49.103s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.993086] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.993086] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1555.993086] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.691s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.993086] env[63241]: DEBUG nova.objects.instance [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lazy-loading 'resources' on Instance uuid e3df56a7-eb82-4297-8aa3-f77c0380b6ec {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1555.996389] env[63241]: DEBUG nova.network.neutron [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Successfully updated port: 8ea61e21-b365-48ae-a2a2-7197b4b7151d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1556.004137] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5439eb6a-9e1e-4386-8fc9-b797d77ef71a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.019659] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1e0236-5030-45c1-a6a5-c64510cc8af8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.038236] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b0b712-30c1-419e-be11-fed0a06e8d38 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.046310] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59667dac-e2b9-44dc-a09b-3248230d7e16 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.081622] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179569MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1556.081863] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.125556] env[63241]: DEBUG oslo_vmware.api [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820405, 'name': PowerOnVM_Task, 'duration_secs': 0.449554} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.126083] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1556.126184] env[63241]: INFO nova.compute.manager [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Took 6.99 seconds to spawn the instance on the hypervisor. [ 1556.126299] env[63241]: DEBUG nova.compute.manager [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1556.127138] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf2ebdf-2cba-4fa1-8315-cbaf1342065e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.151590] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820404, 'name': CreateSnapshot_Task, 'duration_secs': 0.96979} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.151816] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1556.153709] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e891d42-61ee-4b9c-a1da-efbe181ae16d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.650122] env[63241]: INFO nova.compute.manager [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Took 53.98 seconds to build instance. [ 1556.676151] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1556.679764] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-13854395-f668-4e01-97f3-94510a39ae23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.693187] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1556.693187] env[63241]: value = "task-1820406" [ 1556.693187] env[63241]: _type = "Task" [ 1556.693187] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.702777] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820406, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.041444] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e738c67c-88cf-47f9-bbe0-29897cd25276 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.051072] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e5728f-03b6-435a-b562-cfc3d2adcfe4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.091641] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b531ca5-7494-41a9-b139-2ae7138ccc5c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.099900] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ce0040-c274-40f3-8629-14e1571594a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.117274] env[63241]: DEBUG nova.compute.provider_tree [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1557.152933] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a190762a-c866-4f2b-81b0-a98e16d480c3 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "e3842404-2c80-4fa9-b0c9-c58c484845a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.497s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.182239] env[63241]: DEBUG nova.compute.manager [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1557.182239] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2d429e-1127-4e0e-b5ac-5e1f4333ea51 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.206495] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820406, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.622512] env[63241]: DEBUG nova.scheduler.client.report [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1557.696334] env[63241]: INFO nova.compute.manager [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] instance snapshotting [ 1557.703416] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f178827-ae96-4bc1-b960-7ba7dcb6bd61 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.713328] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820406, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.731185] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8b5f90-ba90-4dd2-a18d-5466ed4c546e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.128388] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.135s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.131758] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.753s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.132493] env[63241]: DEBUG nova.objects.instance [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lazy-loading 'resources' on Instance uuid a1f24cfe-88f0-4e73-9ade-2dcf907848a1 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1558.165151] env[63241]: INFO nova.scheduler.client.report [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted allocations for instance e3df56a7-eb82-4297-8aa3-f77c0380b6ec [ 1558.207043] env[63241]: DEBUG 
oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820406, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.243376] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1558.243944] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-468e2962-af37-43ac-81dc-7d94eee8a735 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.252142] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1558.252142] env[63241]: value = "task-1820407" [ 1558.252142] env[63241]: _type = "Task" [ 1558.252142] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.263813] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820407, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.327640] env[63241]: DEBUG nova.network.neutron [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Successfully updated port: 9e415974-1e27-45d2-acb0-cdef98386304 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1558.389040] env[63241]: DEBUG nova.compute.manager [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Received event network-changed-8ea61e21-b365-48ae-a2a2-7197b4b7151d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1558.389247] env[63241]: DEBUG nova.compute.manager [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Refreshing instance network info cache due to event network-changed-8ea61e21-b365-48ae-a2a2-7197b4b7151d. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1558.389440] env[63241]: DEBUG oslo_concurrency.lockutils [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] Acquiring lock "refresh_cache-943100f1-e702-4869-8c19-d81d39712ac5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.389584] env[63241]: DEBUG oslo_concurrency.lockutils [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] Acquired lock "refresh_cache-943100f1-e702-4869-8c19-d81d39712ac5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.389781] env[63241]: DEBUG nova.network.neutron [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Refreshing network info cache for port 8ea61e21-b365-48ae-a2a2-7197b4b7151d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1558.614125] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "31e84206-e583-4610-969e-2ccae2d0b206" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.614125] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "31e84206-e583-4610-969e-2ccae2d0b206" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.679279] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b97e7123-adeb-41c9-9a6b-2561e6494951 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "e3df56a7-eb82-4297-8aa3-f77c0380b6ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.306s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.710495] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820406, 'name': CloneVM_Task, 'duration_secs': 1.834522} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.710495] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Created linked-clone VM from snapshot [ 1558.711570] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b6f7f9-a6cd-4b34-bf87-254113397d1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.725192] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Uploading image 329bc4d0-0ed9-4ffe-a843-80beee7f7bfa {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1558.762009] env[63241]: DEBUG oslo_vmware.rw_handles [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1558.762009] env[63241]: value = "vm-377084" [ 1558.762009] env[63241]: _type = "VirtualMachine" [ 1558.762009] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1558.762716] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1ef1154a-4c86-49c1-903e-5322fe6113dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.769905] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820407, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.778774] env[63241]: DEBUG oslo_vmware.rw_handles [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lease: (returnval){ [ 1558.778774] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f755be-fde9-e8f6-6ef2-08d57cc1997c" [ 1558.778774] env[63241]: _type = "HttpNfcLease" [ 1558.778774] env[63241]: } obtained for exporting VM: (result){ [ 1558.778774] env[63241]: value = "vm-377084" [ 1558.778774] env[63241]: _type = "VirtualMachine" [ 1558.778774] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1558.779107] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the lease: (returnval){ [ 1558.779107] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f755be-fde9-e8f6-6ef2-08d57cc1997c" [ 1558.779107] env[63241]: _type = "HttpNfcLease" [ 1558.779107] env[63241]: } to be ready. 
{{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1558.789155] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1558.789155] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f755be-fde9-e8f6-6ef2-08d57cc1997c" [ 1558.789155] env[63241]: _type = "HttpNfcLease" [ 1558.789155] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1558.830115] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "refresh_cache-943100f1-e702-4869-8c19-d81d39712ac5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.925650] env[63241]: DEBUG nova.network.neutron [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1559.000302] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.000561] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "9d301157-6870-4452-9ae6-0d45c4338886" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.016123] env[63241]: DEBUG nova.network.neutron [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.088615] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbedd0e-a2d9-4a50-acdd-bd712f6a4a9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.096721] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db471db-337b-4760-9abc-41407b14de92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.128653] env[63241]: DEBUG nova.compute.manager [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1559.132320] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0a6267-77b6-4328-b76f-430f014fc945 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.141743] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dada89b-6efd-4d54-9645-65828319ba4f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.159315] env[63241]: DEBUG nova.compute.provider_tree [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.265646] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820407, 'name': CreateSnapshot_Task, 'duration_secs': 0.795647} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.265866] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1559.266638] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66d8f7c-d370-4673-b2f3-f2ec8b54784c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.286289] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1559.286289] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f755be-fde9-e8f6-6ef2-08d57cc1997c" [ 1559.286289] env[63241]: _type = "HttpNfcLease" [ 1559.286289] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1559.286575] env[63241]: DEBUG oslo_vmware.rw_handles [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1559.286575] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f755be-fde9-e8f6-6ef2-08d57cc1997c" [ 1559.286575] env[63241]: _type = "HttpNfcLease" [ 1559.286575] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1559.287339] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fa7787-f046-4efe-86c0-f0fb93a6251f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.294977] env[63241]: DEBUG oslo_vmware.rw_handles [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cea0fe-a0ba-d5a6-c302-0fbc0e86c7d2/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1559.295171] env[63241]: DEBUG oslo_vmware.rw_handles [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cea0fe-a0ba-d5a6-c302-0fbc0e86c7d2/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1559.505825] env[63241]: DEBUG nova.compute.manager [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1559.524168] env[63241]: DEBUG oslo_concurrency.lockutils [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] Releasing lock "refresh_cache-943100f1-e702-4869-8c19-d81d39712ac5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.524356] env[63241]: DEBUG nova.compute.manager [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Received event network-changed-98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1559.524536] env[63241]: DEBUG nova.compute.manager [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Refreshing instance network info cache due to event network-changed-98619b24-0318-422e-90bb-ed8db3309905. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1559.563801] env[63241]: DEBUG oslo_concurrency.lockutils [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] Acquiring lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.563801] env[63241]: DEBUG oslo_concurrency.lockutils [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] Acquired lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.563801] env[63241]: DEBUG nova.network.neutron [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Refreshing network info cache for port 98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1559.563801] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquired lock "refresh_cache-943100f1-e702-4869-8c19-d81d39712ac5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.563801] env[63241]: DEBUG nova.network.neutron [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1559.563801] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7ed48330-1655-46d5-a21b-fb3d253a14ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.665443] env[63241]: DEBUG nova.scheduler.client.report [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1559.686916] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.787174] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Creating 
linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1559.787960] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0a6531e7-9e90-497d-8bab-fa61bb73b064 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.801037] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1559.801037] env[63241]: value = "task-1820409" [ 1559.801037] env[63241]: _type = "Task" [ 1559.801037] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.810938] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820409, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.027847] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.068956] env[63241]: DEBUG nova.network.neutron [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1560.170128] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.039s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.172457] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.248s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.174776] env[63241]: INFO nova.compute.claims [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1560.203986] env[63241]: INFO nova.scheduler.client.report [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Deleted allocations for instance a1f24cfe-88f0-4e73-9ade-2dcf907848a1 [ 1560.312334] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820409, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.334145] env[63241]: DEBUG nova.network.neutron [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updated VIF entry in instance network info cache for port 98619b24-0318-422e-90bb-ed8db3309905. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1560.334631] env[63241]: DEBUG nova.network.neutron [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updating instance_info_cache with network_info: [{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.422089] env[63241]: DEBUG nova.compute.manager [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Received event network-vif-plugged-9e415974-1e27-45d2-acb0-cdef98386304 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1560.422353] env[63241]: DEBUG oslo_concurrency.lockutils [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] Acquiring lock "943100f1-e702-4869-8c19-d81d39712ac5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.422969] env[63241]: DEBUG oslo_concurrency.lockutils [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] Lock "943100f1-e702-4869-8c19-d81d39712ac5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.423419] env[63241]: DEBUG oslo_concurrency.lockutils [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] Lock "943100f1-e702-4869-8c19-d81d39712ac5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.423632] env[63241]: DEBUG nova.compute.manager [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] No waiting events found dispatching 
network-vif-plugged-9e415974-1e27-45d2-acb0-cdef98386304 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1560.423808] env[63241]: WARNING nova.compute.manager [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Received unexpected event network-vif-plugged-9e415974-1e27-45d2-acb0-cdef98386304 for instance with vm_state building and task_state spawning. [ 1560.423976] env[63241]: DEBUG nova.compute.manager [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Received event network-changed-9e415974-1e27-45d2-acb0-cdef98386304 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1560.424193] env[63241]: DEBUG nova.compute.manager [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Refreshing instance network info cache due to event network-changed-9e415974-1e27-45d2-acb0-cdef98386304. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1560.424401] env[63241]: DEBUG oslo_concurrency.lockutils [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] Acquiring lock "refresh_cache-943100f1-e702-4869-8c19-d81d39712ac5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.521696] env[63241]: DEBUG nova.network.neutron [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Updating instance_info_cache with network_info: [{"id": "8ea61e21-b365-48ae-a2a2-7197b4b7151d", "address": "fa:16:3e:40:a4:15", "network": {"id": "703ff09e-e307-4763-aeb5-a229ef649bc3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1085824559", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.167", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ea61e21-b3", "ovs_interfaceid": "8ea61e21-b365-48ae-a2a2-7197b4b7151d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9e415974-1e27-45d2-acb0-cdef98386304", "address": "fa:16:3e:35:f2:29", "network": {"id": "6df5f181-71f0-4624-9995-e0c8715ebd0a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-876749984", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e415974-1e", "ovs_interfaceid": "9e415974-1e27-45d2-acb0-cdef98386304", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.717635] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a05be1be-0095-4c78-bc4a-6549e45b2534 tempest-SecurityGroupsTestJSON-1725646800 tempest-SecurityGroupsTestJSON-1725646800-project-member] Lock "a1f24cfe-88f0-4e73-9ade-2dcf907848a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.548s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.812952] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820409, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.837833] env[63241]: DEBUG oslo_concurrency.lockutils [req-f445d160-52e5-4447-88e7-adf0479572f7 req-6ecace2d-e8ad-4963-8225-eec329198b34 service nova] Releasing lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.025143] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Releasing lock "refresh_cache-943100f1-e702-4869-8c19-d81d39712ac5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.025538] env[63241]: DEBUG nova.compute.manager [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Instance network_info: |[{"id": "8ea61e21-b365-48ae-a2a2-7197b4b7151d", "address": "fa:16:3e:40:a4:15", "network": {"id": "703ff09e-e307-4763-aeb5-a229ef649bc3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1085824559", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.167", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ea61e21-b3", "ovs_interfaceid": 
"8ea61e21-b365-48ae-a2a2-7197b4b7151d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9e415974-1e27-45d2-acb0-cdef98386304", "address": "fa:16:3e:35:f2:29", "network": {"id": "6df5f181-71f0-4624-9995-e0c8715ebd0a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-876749984", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e415974-1e", "ovs_interfaceid": "9e415974-1e27-45d2-acb0-cdef98386304", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1561.025841] env[63241]: DEBUG oslo_concurrency.lockutils [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] Acquired lock "refresh_cache-943100f1-e702-4869-8c19-d81d39712ac5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.026030] env[63241]: DEBUG nova.network.neutron [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Refreshing network info cache for port 9e415974-1e27-45d2-acb0-cdef98386304 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1561.027319] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:a4:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a27fd90b-16a5-43af-bede-ae36762ece00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ea61e21-b365-48ae-a2a2-7197b4b7151d', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:f2:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e415974-1e27-45d2-acb0-cdef98386304', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1561.037579] env[63241]: DEBUG oslo.service.loopingcall [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.038672] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1561.038900] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c39c3276-e90f-43d4-8910-0512e8c44d22 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.063564] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1561.063564] env[63241]: value = "task-1820410" [ 1561.063564] env[63241]: _type = "Task" [ 1561.063564] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.074424] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820410, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.315474] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820409, 'name': CloneVM_Task} progress is 95%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.578740] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820410, 'name': CreateVM_Task, 'duration_secs': 0.48597} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.579288] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1561.580263] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.580616] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.582021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1561.582021] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da400ae0-851a-437e-834a-c05c55fb7227 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.595037] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc 
tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1561.595037] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522bb5b5-a8a1-53a3-83df-133ddb9eec27" [ 1561.595037] env[63241]: _type = "Task" [ 1561.595037] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.606197] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522bb5b5-a8a1-53a3-83df-133ddb9eec27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.675154] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1904fb82-a3b7-4fdd-bd03-c8b13e4b0122 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.683787] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfe44f9-fc5b-4b5b-bcf3-b5404aff4194 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.718422] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c86864-8217-4f67-a002-f5efb5380d1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.726297] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa599b0-eea0-45da-ac44-d6c619e12226 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.740280] env[63241]: DEBUG nova.compute.provider_tree [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.814251] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820409, 'name': CloneVM_Task, 'duration_secs': 1.919616} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.814514] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Created linked-clone VM from snapshot [ 1561.815288] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a6db58-454d-45d2-869f-2ceb7a6813a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.824712] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Uploading image c1a9efad-4b00-49f7-b45d-397c1ef5260b {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1561.848032] env[63241]: DEBUG oslo_vmware.rw_handles [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1561.848032] env[63241]: value = "vm-377086" [ 1561.848032] env[63241]: _type = "VirtualMachine" [ 1561.848032] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1561.848032] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bdb4bd42-9dbc-44fb-b7c8-0f2be6ffa759 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.852857] env[63241]: DEBUG nova.network.neutron [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Updated VIF entry in instance network info cache for port 9e415974-1e27-45d2-acb0-cdef98386304. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1561.852857] env[63241]: DEBUG nova.network.neutron [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Updating instance_info_cache with network_info: [{"id": "8ea61e21-b365-48ae-a2a2-7197b4b7151d", "address": "fa:16:3e:40:a4:15", "network": {"id": "703ff09e-e307-4763-aeb5-a229ef649bc3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1085824559", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.167", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ea61e21-b3", "ovs_interfaceid": "8ea61e21-b365-48ae-a2a2-7197b4b7151d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9e415974-1e27-45d2-acb0-cdef98386304", "address": "fa:16:3e:35:f2:29", "network": {"id": "6df5f181-71f0-4624-9995-e0c8715ebd0a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-876749984", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5b874b5f558e48e9a83b27e69d262106", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e415974-1e", "ovs_interfaceid": "9e415974-1e27-45d2-acb0-cdef98386304", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.856564] env[63241]: DEBUG oslo_vmware.rw_handles [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lease: (returnval){ [ 1561.856564] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276fe14-62ff-0e5e-28da-ec9577162f26" [ 1561.856564] env[63241]: _type = "HttpNfcLease" [ 1561.856564] env[63241]: } obtained for exporting VM: (result){ [ 1561.856564] env[63241]: value = "vm-377086" [ 1561.856564] env[63241]: _type = "VirtualMachine" [ 1561.856564] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1561.856852] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the lease: (returnval){ [ 1561.856852] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276fe14-62ff-0e5e-28da-ec9577162f26" [ 1561.856852] env[63241]: _type = "HttpNfcLease" [ 1561.856852] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1561.864622] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1561.864622] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276fe14-62ff-0e5e-28da-ec9577162f26" [ 1561.864622] env[63241]: _type = "HttpNfcLease" [ 1561.864622] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1562.104358] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522bb5b5-a8a1-53a3-83df-133ddb9eec27, 'name': SearchDatastore_Task, 'duration_secs': 0.017462} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.104666] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.104906] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1562.105199] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.105278] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.107377] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1562.107377] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-debe47d9-347d-47ab-8447-c79575bef92e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.114591] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1562.114815] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1562.115632] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-762ee8b1-1eca-4c0d-bd41-d8fca4a450ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.121292] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1562.121292] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52fd2d6b-36f7-2ad9-ad54-d8ba91a3f1f7" [ 1562.121292] env[63241]: _type = "Task" [ 1562.121292] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.129687] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fd2d6b-36f7-2ad9-ad54-d8ba91a3f1f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.245515] env[63241]: DEBUG nova.scheduler.client.report [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1562.355537] env[63241]: DEBUG oslo_concurrency.lockutils [req-5a8f379c-4c5b-49b1-ad50-76f08412de03 req-a31a2914-733f-45b6-9828-894342bc0629 service nova] Releasing lock "refresh_cache-943100f1-e702-4869-8c19-d81d39712ac5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.368037] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1562.368037] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276fe14-62ff-0e5e-28da-ec9577162f26" [ 1562.368037] env[63241]: _type = "HttpNfcLease" [ 1562.368037] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1562.368037] env[63241]: DEBUG oslo_vmware.rw_handles [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1562.368037] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276fe14-62ff-0e5e-28da-ec9577162f26" [ 1562.368037] env[63241]: _type = "HttpNfcLease" [ 1562.368037] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1562.369052] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9077399d-6490-4a73-b35f-8406228c7525 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.377814] env[63241]: DEBUG oslo_vmware.rw_handles [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52825e67-d7c2-e55b-953f-8d058ba158c0/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1562.378021] env[63241]: DEBUG oslo_vmware.rw_handles [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52825e67-d7c2-e55b-953f-8d058ba158c0/disk-0.vmdk for reading. 
{{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1562.494342] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-30fcca0b-83bf-4a67-a110-fe5dc1c0525c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.634410] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fd2d6b-36f7-2ad9-ad54-d8ba91a3f1f7, 'name': SearchDatastore_Task, 'duration_secs': 0.012197} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.635481] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18267d88-ade2-4a33-97df-12c5bd1b3e23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.642498] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1562.642498] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52695ccf-1674-6938-b163-b69e21dae34f" [ 1562.642498] env[63241]: _type = "Task" [ 1562.642498] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.651616] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52695ccf-1674-6938-b163-b69e21dae34f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.751397] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.579s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.751873] env[63241]: DEBUG nova.compute.manager [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1562.755479] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.711s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.755720] env[63241]: DEBUG nova.objects.instance [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lazy-loading 'resources' on Instance uuid 5fce9350-6d45-4bfb-a74b-f5b384ecb16c {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1563.157107] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52695ccf-1674-6938-b163-b69e21dae34f, 'name': SearchDatastore_Task, 'duration_secs': 0.015672} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.157555] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.158038] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 943100f1-e702-4869-8c19-d81d39712ac5/943100f1-e702-4869-8c19-d81d39712ac5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1563.158484] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9698a28-d8df-4929-8344-9c7b6e3ff2e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.167349] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1563.167349] env[63241]: value = "task-1820412" [ 1563.167349] env[63241]: _type = "Task" [ 1563.167349] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.179307] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820412, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.259506] env[63241]: DEBUG nova.compute.utils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1563.272325] env[63241]: DEBUG nova.compute.manager [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1563.275606] env[63241]: DEBUG nova.network.neutron [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1563.410871] env[63241]: DEBUG nova.policy [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa5224c96c3545269f4f45be620a7cdf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98582d7ee18145318ee5a05cac36781e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1563.680522] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820412, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.771450] env[63241]: DEBUG nova.compute.manager [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1563.819863] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2929b92b-6d21-4ce0-815a-088667640c20 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.829332] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066acb4e-123c-4c43-b9c6-5a9604b060d9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.870141] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dea2bf-22b6-4e88-9898-1373e5e5c170 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.874235] env[63241]: DEBUG nova.network.neutron [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Successfully created port: 8247f3da-5f40-4223-aad2-aa20105cd12d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1563.884740] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fc6e87-135a-4a96-afa9-59f55e2bd45e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.901992] env[63241]: DEBUG nova.compute.provider_tree [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.183730] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820412, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.739652} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.183982] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 943100f1-e702-4869-8c19-d81d39712ac5/943100f1-e702-4869-8c19-d81d39712ac5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1564.184374] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1564.184717] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b05462c8-2c02-4409-9d27-74b7c6e4d0ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.194070] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1564.194070] env[63241]: value = "task-1820413" [ 1564.194070] env[63241]: _type = "Task" [ 1564.194070] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.203654] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820413, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.405557] env[63241]: DEBUG nova.scheduler.client.report [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1564.705873] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820413, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073508} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.706390] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1564.707472] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9af9afa-6a37-491c-ae74-edd09507524f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.734332] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 943100f1-e702-4869-8c19-d81d39712ac5/943100f1-e702-4869-8c19-d81d39712ac5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1564.734687] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd19525e-8ee2-4e57-9bad-801e0ca3533a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.756300] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1564.756300] env[63241]: value = "task-1820414" [ 1564.756300] env[63241]: _type = "Task" [ 1564.756300] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.765721] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820414, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.783273] env[63241]: DEBUG nova.compute.manager [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1564.912693] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.157s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.915564] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.942s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.917365] env[63241]: INFO nova.compute.claims [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1564.946344] env[63241]: INFO nova.scheduler.client.report [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Deleted allocations for instance 5fce9350-6d45-4bfb-a74b-f5b384ecb16c [ 1565.273426] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820414, 'name': ReconfigVM_Task, 'duration_secs': 0.306377} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.273856] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 943100f1-e702-4869-8c19-d81d39712ac5/943100f1-e702-4869-8c19-d81d39712ac5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1565.275548] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffdcb010-99d3-4db7-99ba-6acef7d8a491 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.286388] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1565.286388] env[63241]: value = "task-1820415" [ 1565.286388] env[63241]: _type = "Task" [ 1565.286388] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.298535] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820415, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.454748] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91fa413d-e291-4c91-a2b8-16f22c9e4382 tempest-ServersTestFqdnHostnames-2110418441 tempest-ServersTestFqdnHostnames-2110418441-project-member] Lock "5fce9350-6d45-4bfb-a74b-f5b384ecb16c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.156s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.546475] env[63241]: DEBUG nova.network.neutron [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Successfully updated port: 8247f3da-5f40-4223-aad2-aa20105cd12d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1565.800078] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820415, 'name': Rename_Task, 'duration_secs': 0.203911} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.800442] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1565.800753] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3f5a588-36f7-485a-82cb-2e3f035622a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.807689] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1565.807689] env[63241]: value = "task-1820416" [ 1565.807689] env[63241]: _type = "Task" [ 1565.807689] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.816602] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820416, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.055533] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-34d138e6-90b3-4243-bf45-96ae856cd631" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.055686] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-34d138e6-90b3-4243-bf45-96ae856cd631" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.055839] env[63241]: DEBUG nova.network.neutron [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1566.318496] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820416, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.404110] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e2decf-ede7-443c-8b6f-eec20c385026 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.412949] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0a2559-2917-447b-ae63-0ec9b34d7619 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.446602] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b14048a-c4a0-45e9-a88c-b3fa88ba21d8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.455411] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f6ed1f-ba8e-4910-9498-ee42c6758499 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.469285] env[63241]: DEBUG nova.compute.provider_tree [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1566.594296] env[63241]: DEBUG nova.network.neutron [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1566.748641] env[63241]: DEBUG nova.network.neutron [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Updating instance_info_cache with network_info: [{"id": "8247f3da-5f40-4223-aad2-aa20105cd12d", "address": "fa:16:3e:fd:62:8e", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8247f3da-5f", "ovs_interfaceid": "8247f3da-5f40-4223-aad2-aa20105cd12d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.820391] env[63241]: DEBUG oslo_vmware.api [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820416, 'name': PowerOnVM_Task, 'duration_secs': 0.6545} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.820857] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1566.820982] env[63241]: INFO nova.compute.manager [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Took 11.84 seconds to spawn the instance on the hypervisor. 
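The entries above trace the VMware spawn path for instance 943100f1-e702-4869-8c19-d81d39712ac5: the cached image VMDK is located with SearchDatastore_Task, copied from devstack-image-cache_base into the instance directory (CopyVirtualDisk_Task, task-1820412), extended to the flavor's root size (ExtendVirtualDisk_Task, task-1820413), attached with ReconfigVM_Task, renamed, and powered on (PowerOnVM_Task, task-1820416), after which Nova reports the 11.84 s spawn time. The following is only a minimal sketch of how such vSphere tasks can be driven and polled through an oslo.vmware session; the connection values and managed-object references are placeholders, the attach/rename steps are omitted, and the authoritative implementation is nova.virt.vmwareapi.vm_util / vmops.

```python
from oslo_vmware import api as vmware_api


def prepare_root_disk_and_power_on(session, vm_ref, dc_ref,
                                   src_vmdk, dst_vmdk, new_capacity_kb):
    """Copy the cached image disk, grow it, then power the VM on."""
    vdm = session.vim.service_content.virtualDiskManager

    # CopyVirtualDisk_Task: devstack-image-cache_base/<image>.vmdk ->
    # <instance-uuid>/<instance-uuid>.vmdk (the task-1820412 step above).
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName=src_vmdk, sourceDatacenter=dc_ref,
        destName=dst_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # ExtendVirtualDisk_Task: grow the copied root disk to the flavor's
    # root_gb -- 1048576 KB (1 GiB) in the log above (task-1820413).
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', vdm,
        name=dst_vmdk, datacenter=dc_ref,
        newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(extend_task)

    # ReconfigVM_Task (disk attach) and Rename_Task are omitted in this
    # sketch; the final step is PowerOnVM_Task, polled until completion
    # (the task-1820416 step above).
    power_task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(power_task)


if __name__ == '__main__':
    # Placeholder connection values; a real deployment reads these from
    # nova.conf ([vmware] host_ip / host_username / host_password).
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    # vm_ref, dc_ref and the datastore paths would normally come from
    # property-collector queries; they are left out of this sketch.
```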
[ 1566.821181] env[63241]: DEBUG nova.compute.manager [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1566.822071] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a35cd5-5b4d-4751-b622-03dbeb0e663c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.973190] env[63241]: DEBUG nova.scheduler.client.report [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1567.251646] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-34d138e6-90b3-4243-bf45-96ae856cd631" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.251970] env[63241]: DEBUG nova.compute.manager [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Instance network_info: |[{"id": "8247f3da-5f40-4223-aad2-aa20105cd12d", "address": "fa:16:3e:fd:62:8e", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8247f3da-5f", "ovs_interfaceid": "8247f3da-5f40-4223-aad2-aa20105cd12d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1567.340865] env[63241]: INFO nova.compute.manager [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Took 
63.08 seconds to build instance. [ 1567.479998] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.480623] env[63241]: DEBUG nova.compute.manager [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1567.483583] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.189s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.483863] env[63241]: DEBUG nova.objects.instance [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'resources' on Instance uuid 2d1425f2-ddf9-4e82-bcfe-e11c597d011a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1567.843337] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c71adb0f-2ac4-47fa-bce6-fdb58c40adfc tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "943100f1-e702-4869-8c19-d81d39712ac5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.589s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.987625] env[63241]: DEBUG nova.compute.utils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1567.993034] env[63241]: DEBUG nova.compute.manager [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1567.993363] env[63241]: DEBUG nova.network.neutron [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1568.033456] env[63241]: DEBUG nova.policy [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ddb9a20f31d4823b9f0e79e6ebef57c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b735b36ffab04201b2eed0bccc92c07a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1568.314296] env[63241]: DEBUG nova.network.neutron [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Successfully created port: a8a4fec8-526a-4a17-8012-e0b4703e0b50 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1568.380105] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ba866f-6218-48f9-b236-e37977c0ee54 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.388765] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91315366-55d0-44a7-b398-92a1c16bd256 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.421124] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abab633-24d3-4a9c-a977-588083c579d8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.429639] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0395aafe-f994-463e-967e-89c612539829 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.445944] env[63241]: DEBUG nova.compute.provider_tree [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.493521] env[63241]: DEBUG nova.compute.manager [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1568.616302] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1568.616783] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1568.616783] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1568.616961] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1568.621029] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1568.621029] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1568.621029] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1568.621029] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1568.621029] 
env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1568.621029] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1568.621029] env[63241]: DEBUG nova.virt.hardware [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1568.621029] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5f45c4-dcaa-4d94-972d-bc5da9ab8222 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.630494] env[63241]: DEBUG oslo_vmware.rw_handles [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cea0fe-a0ba-d5a6-c302-0fbc0e86c7d2/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1568.631844] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6386cd31-968d-4b7a-9327-458db7a4527b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.636427] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8245e49c-f2e6-4f67-a44a-44718fcb45c6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.651016] env[63241]: DEBUG oslo_vmware.rw_handles [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cea0fe-a0ba-d5a6-c302-0fbc0e86c7d2/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1568.651230] env[63241]: ERROR oslo_vmware.rw_handles [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cea0fe-a0ba-d5a6-c302-0fbc0e86c7d2/disk-0.vmdk due to incomplete transfer. 
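The nova.virt.hardware lines above walk from the flavor and image limits (0:0:0, i.e. unconstrained) to a single candidate topology for the 1-vCPU m1.nano flavor: "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]". As a rough illustration of that enumeration step only, here is a minimal, self-contained Python sketch; possible_topologies() and its default limits are assumptions for this example, not nova's actual implementation.

# Illustrative only: enumerate (sockets, cores, threads) triples whose
# product equals the vCPU count and that fit within the maximum limits,
# mirroring the "Got 1 possible topologies" step in the log above.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

if __name__ == "__main__":
    # For a 1-vCPU flavor like m1.nano, only 1:1:1 is possible.
    print(list(possible_topologies(1)))   # -> [(1, 1, 1)]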
[ 1568.651836] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:62:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8247f3da-5f40-4223-aad2-aa20105cd12d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1568.659715] env[63241]: DEBUG oslo.service.loopingcall [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1568.660027] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-947b7296-9463-4906-b41d-de91de7f1c1c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.661892] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1568.662134] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e8df0e7-d2ab-4bef-8f95-5605cd282024 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.684662] env[63241]: DEBUG oslo_vmware.rw_handles [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cea0fe-a0ba-d5a6-c302-0fbc0e86c7d2/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1568.684911] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Uploaded image 329bc4d0-0ed9-4ffe-a843-80beee7f7bfa to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1568.687945] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1568.689736] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8d8f167c-f037-43ae-825d-3cb3be305855 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.692249] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1568.692249] env[63241]: value = "task-1820417" [ 1568.692249] env[63241]: _type = "Task" [ 1568.692249] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.698335] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1568.698335] env[63241]: value = "task-1820418" [ 1568.698335] env[63241]: _type = "Task" [ 1568.698335] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.706226] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820417, 'name': CreateVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.713674] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820418, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.949828] env[63241]: DEBUG nova.scheduler.client.report [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1569.203341] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820417, 'name': CreateVM_Task, 'duration_secs': 0.487464} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.207098] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1569.207875] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.208161] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.208616] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1569.209496] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76d44edd-b56d-4cc9-80eb-70da6567e6ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.218642] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820418, 'name': Destroy_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.222801] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1569.222801] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525c3a10-cdaf-cf19-6096-8a1ff640d36b" [ 1569.222801] env[63241]: _type = "Task" [ 1569.222801] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.231752] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525c3a10-cdaf-cf19-6096-8a1ff640d36b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.441186] env[63241]: DEBUG nova.compute.manager [req-aefdca1f-4d47-41e0-933c-44059d3a6796 req-498ace08-7cbf-4b7d-9e27-b28d9ac1c8ae service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Received event network-vif-plugged-8247f3da-5f40-4223-aad2-aa20105cd12d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1569.441383] env[63241]: DEBUG oslo_concurrency.lockutils [req-aefdca1f-4d47-41e0-933c-44059d3a6796 req-498ace08-7cbf-4b7d-9e27-b28d9ac1c8ae service nova] Acquiring lock "34d138e6-90b3-4243-bf45-96ae856cd631-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.441687] env[63241]: DEBUG oslo_concurrency.lockutils [req-aefdca1f-4d47-41e0-933c-44059d3a6796 req-498ace08-7cbf-4b7d-9e27-b28d9ac1c8ae service nova] Lock "34d138e6-90b3-4243-bf45-96ae856cd631-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.441942] env[63241]: DEBUG oslo_concurrency.lockutils [req-aefdca1f-4d47-41e0-933c-44059d3a6796 req-498ace08-7cbf-4b7d-9e27-b28d9ac1c8ae service nova] Lock "34d138e6-90b3-4243-bf45-96ae856cd631-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.442165] env[63241]: DEBUG nova.compute.manager [req-aefdca1f-4d47-41e0-933c-44059d3a6796 req-498ace08-7cbf-4b7d-9e27-b28d9ac1c8ae service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] No waiting events found dispatching network-vif-plugged-8247f3da-5f40-4223-aad2-aa20105cd12d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1569.442358] env[63241]: WARNING nova.compute.manager [req-aefdca1f-4d47-41e0-933c-44059d3a6796 req-498ace08-7cbf-4b7d-9e27-b28d9ac1c8ae service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Received unexpected event network-vif-plugged-8247f3da-5f40-4223-aad2-aa20105cd12d for instance with vm_state building and task_state spawning. 
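The CreateVM_Task and Destroy_Task entries above follow the same pattern: a task object is returned by the vCenter API, then wait_for_task/_poll_task report progress ("progress is 6%", "progress is 33%") until the task completes with a duration_secs value. The sketch below shows only the general shape of such a polling loop; it is not oslo.vmware's implementation, and fetch_task_state() is a hypothetical callable standing in for the real property-collector query that oslo_vmware.api performs.

# Illustrative polling loop, assuming a caller-supplied fetch_task_state()
# that returns a dict like {'state': 'running', 'progress': 6}.
import time

def wait_for_task(fetch_task_state, interval=0.5, timeout=300):
    """Poll a task until it reports success or error, returning its result."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = fetch_task_state()
        if state['state'] == 'success':
            return state.get('result')
        if state['state'] == 'error':
            raise RuntimeError(state.get('error', 'task failed'))
        time.sleep(interval)   # task still queued/running: poll again
    raise TimeoutError('task did not complete in time')

if __name__ == "__main__":
    # Fake task that reports progress twice, then succeeds.
    states = iter([{'state': 'running', 'progress': 6},
                   {'state': 'running', 'progress': 51},
                   {'state': 'success', 'result': 'task-done'}])
    print(wait_for_task(lambda: next(states), interval=0.01))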
[ 1569.455751] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.972s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.459742] env[63241]: DEBUG oslo_concurrency.lockutils [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.601s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.459742] env[63241]: DEBUG nova.objects.instance [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lazy-loading 'resources' on Instance uuid eaed706d-b3db-46ed-8c70-08f80479afa4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1569.483589] env[63241]: INFO nova.scheduler.client.report [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted allocations for instance 2d1425f2-ddf9-4e82-bcfe-e11c597d011a [ 1569.503713] env[63241]: DEBUG nova.compute.manager [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1569.539145] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1569.539786] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1569.539786] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1569.539786] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1569.540195] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1569.540195] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1569.540412] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1569.540502] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1569.540794] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1569.540999] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1569.541197] env[63241]: DEBUG nova.virt.hardware [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1569.542464] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf705ce-5673-4008-a1e4-e0f57bcf389d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.552243] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6feb7a2e-ea45-4bba-8e6c-4543589efc8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.713514] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820418, 'name': Destroy_Task, 'duration_secs': 0.579316} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.713514] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Destroyed the VM [ 1569.713514] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1569.713514] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1477ca32-22f8-4a72-b740-8e6fb4cc7d92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.719975] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1569.719975] env[63241]: value = "task-1820419" [ 1569.719975] env[63241]: _type = "Task" [ 1569.719975] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.731207] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820419, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.735206] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525c3a10-cdaf-cf19-6096-8a1ff640d36b, 'name': SearchDatastore_Task, 'duration_secs': 0.013074} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.735828] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.735828] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1569.735963] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.736125] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.736368] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1569.736690] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96224b59-3c2d-433f-9f69-5e57e032006c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.745415] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1569.745608] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1569.746409] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ad7e2ac-478e-41e7-a0ea-2238049300db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.751679] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1569.751679] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524f0fdb-cd79-0500-41cb-f619a02ae183" [ 1569.751679] env[63241]: _type = "Task" [ 1569.751679] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.759487] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524f0fdb-cd79-0500-41cb-f619a02ae183, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.991746] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5b70b349-7fe0-4109-927d-c15a34f009c5 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "2d1425f2-ddf9-4e82-bcfe-e11c597d011a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.789s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.103381] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "943100f1-e702-4869-8c19-d81d39712ac5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.103729] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "943100f1-e702-4869-8c19-d81d39712ac5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.103860] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "943100f1-e702-4869-8c19-d81d39712ac5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.104101] env[63241]: DEBUG oslo_concurrency.lockutils 
[None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "943100f1-e702-4869-8c19-d81d39712ac5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.104244] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "943100f1-e702-4869-8c19-d81d39712ac5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.107869] env[63241]: INFO nova.compute.manager [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Terminating instance [ 1570.110013] env[63241]: DEBUG nova.compute.manager [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1570.110164] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1570.111537] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d811487d-e832-4953-8ca9-70766fad84ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.120094] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1570.120737] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3042a44a-c947-41fc-87b9-98ae17aaa58b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.127492] env[63241]: DEBUG oslo_vmware.api [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1570.127492] env[63241]: value = "task-1820420" [ 1570.127492] env[63241]: _type = "Task" [ 1570.127492] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.138196] env[63241]: DEBUG oslo_vmware.api [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820420, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.231993] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820419, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.263355] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524f0fdb-cd79-0500-41cb-f619a02ae183, 'name': SearchDatastore_Task, 'duration_secs': 0.010264} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.267058] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-331cb694-c317-4bc2-be8f-ba4595bfbc1f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.273129] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1570.273129] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bdd59f-747c-50db-28e1-80b4381a63c3" [ 1570.273129] env[63241]: _type = "Task" [ 1570.273129] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.282614] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bdd59f-747c-50db-28e1-80b4381a63c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.405204] env[63241]: DEBUG nova.network.neutron [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Successfully updated port: a8a4fec8-526a-4a17-8012-e0b4703e0b50 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1570.500328] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b01f5d-91d2-454e-8794-84dc421b3782 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.508962] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76dc33a7-bdcf-44ce-b160-66c79df6e36a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.543621] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15356e26-43c2-4508-8f89-a10ed05fa5b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.552263] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0607c6-4856-41e2-8d51-04db2073d7b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.573912] env[63241]: DEBUG nova.compute.provider_tree [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1570.642374] env[63241]: DEBUG oslo_vmware.api [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820420, 'name': PowerOffVM_Task, 'duration_secs': 0.220399} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.642656] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1570.642803] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1570.643095] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4e6606b-4762-45b3-a2a0-6e2b7758d9ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.683308] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquiring lock "cfdc6b34-6940-414f-b17d-6fe17f92474a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.683426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "cfdc6b34-6940-414f-b17d-6fe17f92474a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.717768] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "f583adda-976e-4f79-adc7-0b4e1a73ad73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.718036] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "f583adda-976e-4f79-adc7-0b4e1a73ad73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.730976] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820419, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.784644] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bdd59f-747c-50db-28e1-80b4381a63c3, 'name': SearchDatastore_Task, 'duration_secs': 0.019015} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.784644] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.784644] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 34d138e6-90b3-4243-bf45-96ae856cd631/34d138e6-90b3-4243-bf45-96ae856cd631.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1570.784644] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-716be697-d9bf-468f-ad2c-27490b56a77e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.791747] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1570.791747] env[63241]: value = "task-1820422" [ 1570.791747] env[63241]: _type = "Task" [ 1570.791747] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.800927] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820422, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.816204] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1570.816437] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1570.816703] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Deleting the datastore file [datastore1] 943100f1-e702-4869-8c19-d81d39712ac5 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1570.817047] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3229e20d-0a19-44d0-9a72-0a86da47818e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.824510] env[63241]: DEBUG oslo_vmware.api [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for the task: (returnval){ [ 1570.824510] env[63241]: value = "task-1820423" [ 1570.824510] env[63241]: _type = "Task" [ 1570.824510] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.833710] env[63241]: DEBUG oslo_vmware.api [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820423, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.907972] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquiring lock "refresh_cache-f4eb5e49-bae0-435c-93f0-15d6939f9e7c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.908137] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquired lock "refresh_cache-f4eb5e49-bae0-435c-93f0-15d6939f9e7c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.908290] env[63241]: DEBUG nova.network.neutron [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1571.077339] env[63241]: DEBUG nova.scheduler.client.report [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1571.186354] env[63241]: DEBUG nova.compute.manager [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1571.221794] env[63241]: DEBUG nova.compute.manager [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1571.233946] env[63241]: DEBUG oslo_vmware.api [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820419, 'name': RemoveSnapshot_Task, 'duration_secs': 1.048648} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.234484] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1571.235091] env[63241]: INFO nova.compute.manager [None req-dc72a425-ad19-4d03-b2a2-85a47273477f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Took 16.64 seconds to snapshot the instance on the hypervisor. [ 1571.306533] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820422, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.335987] env[63241]: DEBUG oslo_vmware.api [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Task: {'id': task-1820423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.381638} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.336644] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1571.336839] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1571.338016] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1571.338016] env[63241]: INFO nova.compute.manager [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1571.338016] env[63241]: DEBUG oslo.service.loopingcall [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1571.338016] env[63241]: DEBUG nova.compute.manager [-] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1571.338016] env[63241]: DEBUG nova.network.neutron [-] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1571.456911] env[63241]: DEBUG nova.network.neutron [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1571.510122] env[63241]: DEBUG oslo_vmware.rw_handles [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52825e67-d7c2-e55b-953f-8d058ba158c0/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1571.511589] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8e1d7b-6c0e-4072-bb4a-b577b05dae4b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.521279] env[63241]: DEBUG oslo_vmware.rw_handles [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52825e67-d7c2-e55b-953f-8d058ba158c0/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1571.521279] env[63241]: ERROR oslo_vmware.rw_handles [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52825e67-d7c2-e55b-953f-8d058ba158c0/disk-0.vmdk due to incomplete transfer. [ 1571.521279] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6eaf8378-b9b0-4f9a-b169-0913fc2cebba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.528329] env[63241]: DEBUG oslo_vmware.rw_handles [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52825e67-d7c2-e55b-953f-8d058ba158c0/disk-0.vmdk. 
{{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1571.528402] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Uploaded image c1a9efad-4b00-49f7-b45d-397c1ef5260b to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1571.529985] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1571.530416] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-23b3f14e-5aa7-49a3-a60c-2436b9589fba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.538216] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1571.538216] env[63241]: value = "task-1820424" [ 1571.538216] env[63241]: _type = "Task" [ 1571.538216] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.546935] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820424, 'name': Destroy_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.582615] env[63241]: DEBUG oslo_concurrency.lockutils [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.123s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.585081] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 46.474s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.585948] env[63241]: DEBUG nova.objects.instance [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1571.611801] env[63241]: INFO nova.scheduler.client.report [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Deleted allocations for instance eaed706d-b3db-46ed-8c70-08f80479afa4 [ 1571.637645] env[63241]: DEBUG nova.compute.manager [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Received event network-changed-8247f3da-5f40-4223-aad2-aa20105cd12d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1571.637645] env[63241]: DEBUG nova.compute.manager [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Refreshing instance network info cache due to event network-changed-8247f3da-5f40-4223-aad2-aa20105cd12d. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1571.637814] env[63241]: DEBUG oslo_concurrency.lockutils [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] Acquiring lock "refresh_cache-34d138e6-90b3-4243-bf45-96ae856cd631" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.637956] env[63241]: DEBUG oslo_concurrency.lockutils [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] Acquired lock "refresh_cache-34d138e6-90b3-4243-bf45-96ae856cd631" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.639898] env[63241]: DEBUG nova.network.neutron [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Refreshing network info cache for port 8247f3da-5f40-4223-aad2-aa20105cd12d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1571.646773] env[63241]: DEBUG nova.network.neutron [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Updating instance_info_cache with network_info: [{"id": "a8a4fec8-526a-4a17-8012-e0b4703e0b50", "address": "fa:16:3e:12:ac:e0", "network": {"id": "9a953b80-cf2c-4a1c-b81c-ade42bc90e20", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-958847003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b735b36ffab04201b2eed0bccc92c07a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a4fec8-52", "ovs_interfaceid": "a8a4fec8-526a-4a17-8012-e0b4703e0b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.711297] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.743728] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.804025] env[63241]: DEBUG oslo_vmware.api 
[None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820422, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576349} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.804025] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 34d138e6-90b3-4243-bf45-96ae856cd631/34d138e6-90b3-4243-bf45-96ae856cd631.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1571.804025] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1571.804025] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b70bd84f-9a1a-466b-bc58-d1486d0afbdc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.811285] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1571.811285] env[63241]: value = "task-1820425" [ 1571.811285] env[63241]: _type = "Task" [ 1571.811285] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.820931] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820425, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.049589] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820424, 'name': Destroy_Task, 'duration_secs': 0.345175} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.049589] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Destroyed the VM [ 1572.049777] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1572.050046] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-358302b2-eb37-4525-bc0d-55aaaffffb88 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.056868] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1572.056868] env[63241]: value = "task-1820426" [ 1572.056868] env[63241]: _type = "Task" [ 1572.056868] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.068765] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820426, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.122828] env[63241]: DEBUG oslo_concurrency.lockutils [None req-592398c8-49cf-438a-9d55-25448c39d979 tempest-TenantUsagesTestJSON-839879260 tempest-TenantUsagesTestJSON-839879260-project-member] Lock "eaed706d-b3db-46ed-8c70-08f80479afa4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.662s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.148937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Releasing lock "refresh_cache-f4eb5e49-bae0-435c-93f0-15d6939f9e7c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.149253] env[63241]: DEBUG nova.compute.manager [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Instance network_info: |[{"id": "a8a4fec8-526a-4a17-8012-e0b4703e0b50", "address": "fa:16:3e:12:ac:e0", "network": {"id": "9a953b80-cf2c-4a1c-b81c-ade42bc90e20", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-958847003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "b735b36ffab04201b2eed0bccc92c07a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a4fec8-52", "ovs_interfaceid": "a8a4fec8-526a-4a17-8012-e0b4703e0b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1572.149697] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:ac:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dced2f3d-7fd3-4a42-836d-9f02dab4c949', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8a4fec8-526a-4a17-8012-e0b4703e0b50', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1572.159604] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Creating folder: Project (b735b36ffab04201b2eed0bccc92c07a). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1572.161179] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe950519-0c36-4a5b-8e26-2bc10ec4aaab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.171972] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Created folder: Project (b735b36ffab04201b2eed0bccc92c07a) in parent group-v376927. [ 1572.172899] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Creating folder: Instances. Parent ref: group-v377089. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1572.173262] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e4f2b5c-2365-4482-8cde-4770445bc397 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.184069] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Created folder: Instances in parent group-v377089. [ 1572.184380] env[63241]: DEBUG oslo.service.loopingcall [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1572.184598] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1572.184864] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8053930-c321-4cc0-b369-e26a0c373fba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.212215] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1572.212215] env[63241]: value = "task-1820429" [ 1572.212215] env[63241]: _type = "Task" [ 1572.212215] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.221358] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820429, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.323377] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820425, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069708} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.323655] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1572.324477] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ef2975-acc9-4925-b6d1-c26e291848ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.348513] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 34d138e6-90b3-4243-bf45-96ae856cd631/34d138e6-90b3-4243-bf45-96ae856cd631.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1572.348835] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31f79c09-2c78-4741-a1c2-5ba86c604134 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.371737] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1572.371737] env[63241]: value = "task-1820430" [ 1572.371737] env[63241]: _type = "Task" [ 1572.371737] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.380094] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820430, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.517428] env[63241]: DEBUG nova.network.neutron [-] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.561671] env[63241]: DEBUG nova.network.neutron [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Updated VIF entry in instance network info cache for port 8247f3da-5f40-4223-aad2-aa20105cd12d. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1572.562100] env[63241]: DEBUG nova.network.neutron [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Updating instance_info_cache with network_info: [{"id": "8247f3da-5f40-4223-aad2-aa20105cd12d", "address": "fa:16:3e:fd:62:8e", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8247f3da-5f", "ovs_interfaceid": "8247f3da-5f40-4223-aad2-aa20105cd12d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.570098] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820426, 'name': RemoveSnapshot_Task} progress is 36%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.599687] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3cdd30a-828c-40c8-85ea-3b255e2e2b9c tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.599687] env[63241]: DEBUG oslo_concurrency.lockutils [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.298s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.599687] env[63241]: DEBUG nova.objects.instance [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lazy-loading 'resources' on Instance uuid 81854e13-e0c1-43a9-8529-678d56d57bbf {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1572.730342] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820429, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.882095] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820430, 'name': ReconfigVM_Task, 'duration_secs': 0.333991} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.884097] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 34d138e6-90b3-4243-bf45-96ae856cd631/34d138e6-90b3-4243-bf45-96ae856cd631.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1572.884097] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d9f481b-cbdd-4ab9-883d-889944c802df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.890373] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1572.890373] env[63241]: value = "task-1820431" [ 1572.890373] env[63241]: _type = "Task" [ 1572.890373] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.898427] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820431, 'name': Rename_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.020386] env[63241]: INFO nova.compute.manager [-] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Took 1.68 seconds to deallocate network for instance. [ 1573.067408] env[63241]: DEBUG oslo_concurrency.lockutils [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] Releasing lock "refresh_cache-34d138e6-90b3-4243-bf45-96ae856cd631" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.067698] env[63241]: DEBUG nova.compute.manager [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Received event network-vif-plugged-a8a4fec8-526a-4a17-8012-e0b4703e0b50 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1573.067904] env[63241]: DEBUG oslo_concurrency.lockutils [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] Acquiring lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.068162] env[63241]: DEBUG oslo_concurrency.lockutils [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] Lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.068341] env[63241]: DEBUG oslo_concurrency.lockutils [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] Lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.068534] env[63241]: DEBUG nova.compute.manager [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] No waiting events found dispatching network-vif-plugged-a8a4fec8-526a-4a17-8012-e0b4703e0b50 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1573.068734] env[63241]: WARNING nova.compute.manager [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Received unexpected event network-vif-plugged-a8a4fec8-526a-4a17-8012-e0b4703e0b50 for instance with vm_state building and task_state spawning. [ 1573.068927] env[63241]: DEBUG nova.compute.manager [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Received event network-changed-a8a4fec8-526a-4a17-8012-e0b4703e0b50 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1573.069146] env[63241]: DEBUG nova.compute.manager [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Refreshing instance network info cache due to event network-changed-a8a4fec8-526a-4a17-8012-e0b4703e0b50. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1573.069364] env[63241]: DEBUG oslo_concurrency.lockutils [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] Acquiring lock "refresh_cache-f4eb5e49-bae0-435c-93f0-15d6939f9e7c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.069506] env[63241]: DEBUG oslo_concurrency.lockutils [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] Acquired lock "refresh_cache-f4eb5e49-bae0-435c-93f0-15d6939f9e7c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.069667] env[63241]: DEBUG nova.network.neutron [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Refreshing network info cache for port a8a4fec8-526a-4a17-8012-e0b4703e0b50 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1573.070995] env[63241]: DEBUG oslo_vmware.api [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820426, 'name': RemoveSnapshot_Task, 'duration_secs': 0.659012} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.071263] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1573.071492] env[63241]: INFO nova.compute.manager [None req-68189365-9c97-4daa-874a-1f13d8cf97c4 tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Took 15.37 seconds to snapshot the instance on the hypervisor. [ 1573.223296] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820429, 'name': CreateVM_Task, 'duration_secs': 0.654551} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.223537] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1573.224190] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.224379] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.224733] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1573.227477] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1d29f48-c9b9-47de-91aa-fa1b5042a186 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.232717] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1573.232717] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c025cb-97f2-97cc-253c-4a098488b07d" [ 1573.232717] env[63241]: _type = "Task" [ 1573.232717] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.240802] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c025cb-97f2-97cc-253c-4a098488b07d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.246867] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "1626092d-78ef-41b5-8b47-fb840d63e4f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.247102] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.399643] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820431, 'name': Rename_Task, 'duration_secs': 0.170175} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.400045] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1573.400158] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31cb8b1f-b8da-40a5-bb58-9222fc12d3a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.406444] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1573.406444] env[63241]: value = "task-1820432" [ 1573.406444] env[63241]: _type = "Task" [ 1573.406444] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.413769] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820432, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.445933] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfaae8f-7160-4e9f-ad4d-79ad249feda1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.454049] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f950b648-25fe-4be0-b418-23922ee13850 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.484991] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0e12a9-e016-4fc4-bceb-879f2e511f79 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.492614] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ae4aab-78e2-4498-9acd-c63b047e675f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.505861] env[63241]: DEBUG nova.compute.provider_tree [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.528963] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.708117] env[63241]: DEBUG nova.compute.manager [req-0c391b15-a882-406d-a44c-58355d7a5f50 req-71feb20c-b099-4e01-9d5f-348de46efc92 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Received event network-vif-deleted-9e415974-1e27-45d2-acb0-cdef98386304 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1573.708347] env[63241]: DEBUG nova.compute.manager [req-0c391b15-a882-406d-a44c-58355d7a5f50 req-71feb20c-b099-4e01-9d5f-348de46efc92 service nova] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Received event network-vif-deleted-8ea61e21-b365-48ae-a2a2-7197b4b7151d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1573.744511] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c025cb-97f2-97cc-253c-4a098488b07d, 'name': SearchDatastore_Task, 'duration_secs': 0.009812} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.744819] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.745064] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1573.745298] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.745443] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.745619] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1573.745873] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06194f15-3d64-4914-a6fb-9afe2cfa000d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.750343] env[63241]: DEBUG nova.compute.manager [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1573.754418] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1573.754524] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1573.755265] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5745301-d7b7-41bc-b4db-56e104ea5655 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.760025] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1573.760025] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52088640-c409-5c05-7b23-cc78ac207366" [ 1573.760025] env[63241]: _type = "Task" [ 1573.760025] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.767843] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52088640-c409-5c05-7b23-cc78ac207366, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.873276] env[63241]: DEBUG nova.network.neutron [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Updated VIF entry in instance network info cache for port a8a4fec8-526a-4a17-8012-e0b4703e0b50. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1573.873765] env[63241]: DEBUG nova.network.neutron [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Updating instance_info_cache with network_info: [{"id": "a8a4fec8-526a-4a17-8012-e0b4703e0b50", "address": "fa:16:3e:12:ac:e0", "network": {"id": "9a953b80-cf2c-4a1c-b81c-ade42bc90e20", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-958847003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b735b36ffab04201b2eed0bccc92c07a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8a4fec8-52", "ovs_interfaceid": "a8a4fec8-526a-4a17-8012-e0b4703e0b50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.917658] env[63241]: DEBUG oslo_vmware.api [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820432, 'name': PowerOnVM_Task, 'duration_secs': 0.487873} 
completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.917658] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1573.917658] env[63241]: INFO nova.compute.manager [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Took 9.13 seconds to spawn the instance on the hypervisor. [ 1573.917658] env[63241]: DEBUG nova.compute.manager [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1573.918090] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96039909-68b8-48f1-8531-ac923d575cb4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.008872] env[63241]: DEBUG nova.scheduler.client.report [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.271716] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52088640-c409-5c05-7b23-cc78ac207366, 'name': SearchDatastore_Task, 'duration_secs': 0.008957} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.272865] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.273720] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d9195a6-9fed-4de5-b59f-3f96ff0a0c7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.278759] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1574.278759] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523a4123-a943-772b-70f3-39f9a10def6f" [ 1574.278759] env[63241]: _type = "Task" [ 1574.278759] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.286944] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523a4123-a943-772b-70f3-39f9a10def6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.377820] env[63241]: DEBUG oslo_concurrency.lockutils [req-faca03d8-d7cf-4683-967e-25fa12149cdd req-37316c2b-999e-4abf-ab62-bc141424f08e service nova] Releasing lock "refresh_cache-f4eb5e49-bae0-435c-93f0-15d6939f9e7c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.435286] env[63241]: INFO nova.compute.manager [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Took 60.53 seconds to build instance. 
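The spawn of instance 34d138e6-90b3-4243-bf45-96ae856cd631 above is driven entirely by vSphere tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each advanced by the _poll_task/wait_for_task entries that report "progress is N%" and then "completed successfully". A minimal sketch of that polling pattern follows, for illustration only: it is not the oslo.vmware implementation, and get_task_info is a hypothetical callable standing in for a TaskInfo lookup against the vCenter session.

import time

def wait_for_vsphere_task(get_task_info, task_ref, poll_interval=0.5):
    # Poll the task until it reaches a terminal state, mirroring the
    # "progress is N%" ... "completed successfully" lines in the log.
    while True:
        info = get_task_info(task_ref)      # hypothetical TaskInfo lookup
        if info.state == "success":
            return info.result              # e.g. the created VM or copied disk ref
        if info.state == "error":
            raise RuntimeError("task %s failed: %s" % (task_ref, info.error))
        time.sleep(poll_interval)           # still queued/running; retry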
[ 1574.514665] env[63241]: DEBUG oslo_concurrency.lockutils [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.918s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.518041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.203s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.518041] env[63241]: DEBUG nova.objects.instance [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lazy-loading 'resources' on Instance uuid 780f3eee-f6c7-4054-8e6e-a370f74dc405 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1574.533452] env[63241]: INFO nova.scheduler.client.report [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Deleted allocations for instance 81854e13-e0c1-43a9-8529-678d56d57bbf [ 1574.678918] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquiring lock "a534b054-2143-41c4-a0fa-028339ecdbbf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.679293] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "a534b054-2143-41c4-a0fa-028339ecdbbf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.679522] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquiring lock "a534b054-2143-41c4-a0fa-028339ecdbbf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.679708] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "a534b054-2143-41c4-a0fa-028339ecdbbf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.679879] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 
tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "a534b054-2143-41c4-a0fa-028339ecdbbf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.682114] env[63241]: INFO nova.compute.manager [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Terminating instance [ 1574.683851] env[63241]: DEBUG nova.compute.manager [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1574.684107] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1574.684978] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe367b3-faa2-4dc3-85d6-33cb7f21152e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.694656] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1574.694900] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13548f91-6670-4359-890d-e5560f7641cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.701524] env[63241]: DEBUG oslo_vmware.api [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1574.701524] env[63241]: value = "task-1820433" [ 1574.701524] env[63241]: _type = "Task" [ 1574.701524] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.710991] env[63241]: DEBUG oslo_vmware.api [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820433, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.789394] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523a4123-a943-772b-70f3-39f9a10def6f, 'name': SearchDatastore_Task, 'duration_secs': 0.011313} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.789607] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.789878] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f4eb5e49-bae0-435c-93f0-15d6939f9e7c/f4eb5e49-bae0-435c-93f0-15d6939f9e7c.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1574.790289] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04d20755-3e36-46cb-9128-85ecacab1cf7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.796734] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1574.796734] env[63241]: value = "task-1820434" [ 1574.796734] env[63241]: _type = "Task" [ 1574.796734] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.804887] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820434, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.937975] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4eddc77f-a4f4-4b80-92bf-e60a356e3523 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "34d138e6-90b3-4243-bf45-96ae856cd631" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.921s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.041862] env[63241]: DEBUG oslo_concurrency.lockutils [None req-64bc3287-84de-44fa-aa2d-81c235cc09f8 tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "81854e13-e0c1-43a9-8529-678d56d57bbf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.964s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.212365] env[63241]: DEBUG oslo_vmware.api [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820433, 'name': PowerOffVM_Task, 'duration_secs': 0.314976} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.215220] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1575.215413] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1575.215886] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46860ed0-4a4d-4094-a0e1-1825b217ce5a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.311051] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820434, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.312177] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1575.312376] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1575.312554] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Deleting the datastore file [datastore1] a534b054-2143-41c4-a0fa-028339ecdbbf {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1575.312801] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cbfa9a9f-4630-42bb-9cf2-46723d4ac9d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.319566] env[63241]: DEBUG oslo_vmware.api [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for the task: (returnval){ [ 1575.319566] env[63241]: value = "task-1820436" [ 1575.319566] env[63241]: _type = "Task" [ 1575.319566] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.330031] env[63241]: DEBUG oslo_vmware.api [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820436, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.411156] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d89e77c-9238-4252-8c63-bce2d75b97a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.418650] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6134d69-07c9-46a2-a02e-bfaf6f193df9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.451715] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd054109-dca7-4da5-a2d8-1ab0377fc157 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.459239] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a5c5dd-60b6-4ce7-ab81-b3f18b7d53ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.472883] env[63241]: DEBUG nova.compute.provider_tree [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1575.488945] env[63241]: INFO nova.compute.manager [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Rebuilding instance [ 1575.528917] env[63241]: DEBUG nova.compute.manager [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1575.529911] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5044a88-e317-4c40-afb0-6351f7f3f5c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.809761] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820434, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584467} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.810102] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f4eb5e49-bae0-435c-93f0-15d6939f9e7c/f4eb5e49-bae0-435c-93f0-15d6939f9e7c.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1575.810324] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1575.810575] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84a76f5a-dd90-4d13-a6f3-8dba709cd63e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.817817] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1575.817817] env[63241]: value = "task-1820437" [ 1575.817817] env[63241]: _type = "Task" [ 1575.817817] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.829482] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820437, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.834320] env[63241]: DEBUG oslo_vmware.api [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Task: {'id': task-1820436, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165843} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.834550] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1575.834733] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1575.834976] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1575.835181] env[63241]: INFO nova.compute.manager [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1575.835417] env[63241]: DEBUG oslo.service.loopingcall [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.835624] env[63241]: DEBUG nova.compute.manager [-] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1575.835720] env[63241]: DEBUG nova.network.neutron [-] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1575.976410] env[63241]: DEBUG nova.scheduler.client.report [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1576.042018] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1576.042018] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a6d020a-4895-41dc-afd8-fb24cbf68a48 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.049238] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1576.049238] env[63241]: value = "task-1820438" [ 1576.049238] env[63241]: _type = "Task" [ 1576.049238] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.057205] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820438, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.132525] env[63241]: DEBUG nova.compute.manager [req-2f45c1a9-4a4b-4a42-9af6-e5446ad69823 req-bffa35e8-fc88-4a06-8997-01722a9799c6 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Received event network-vif-deleted-c486fc54-8deb-4aab-89c9-18d333e4490e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1576.132525] env[63241]: INFO nova.compute.manager [req-2f45c1a9-4a4b-4a42-9af6-e5446ad69823 req-bffa35e8-fc88-4a06-8997-01722a9799c6 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Neutron deleted interface c486fc54-8deb-4aab-89c9-18d333e4490e; detaching it from the instance and deleting it from the info cache [ 1576.132525] env[63241]: DEBUG nova.network.neutron [req-2f45c1a9-4a4b-4a42-9af6-e5446ad69823 req-bffa35e8-fc88-4a06-8997-01722a9799c6 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.327367] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820437, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09406} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.327660] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1576.328448] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09988eff-1cf4-4a7b-9bc9-1810c3e65005 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.351968] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] f4eb5e49-bae0-435c-93f0-15d6939f9e7c/f4eb5e49-bae0-435c-93f0-15d6939f9e7c.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1576.351968] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-312aa99f-aa49-4491-82e2-be2879aa98ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.371270] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1576.371270] env[63241]: value = "task-1820439" [ 1576.371270] env[63241]: _type = "Task" [ 1576.371270] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.379645] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.482073] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.964s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.484555] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.039s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.484814] env[63241]: DEBUG nova.objects.instance [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lazy-loading 'resources' on Instance uuid bef91c1c-a418-4464-ae7b-883ffb7e9695 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1576.508319] env[63241]: INFO nova.scheduler.client.report [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Deleted allocations for instance 780f3eee-f6c7-4054-8e6e-a370f74dc405 [ 1576.561200] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820438, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.612058] env[63241]: DEBUG nova.network.neutron [-] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.634865] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-baa34bc5-04cb-4e9e-8f45-5b261811011c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.643959] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ff8648-435d-4744-a7ba-8904c957c7a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.675628] env[63241]: DEBUG nova.compute.manager [req-2f45c1a9-4a4b-4a42-9af6-e5446ad69823 req-bffa35e8-fc88-4a06-8997-01722a9799c6 service nova] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Detach interface failed, port_id=c486fc54-8deb-4aab-89c9-18d333e4490e, reason: Instance a534b054-2143-41c4-a0fa-028339ecdbbf could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1576.881685] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820439, 'name': ReconfigVM_Task, 'duration_secs': 0.3159} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.881976] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Reconfigured VM instance instance-00000039 to attach disk [datastore1] f4eb5e49-bae0-435c-93f0-15d6939f9e7c/f4eb5e49-bae0-435c-93f0-15d6939f9e7c.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1576.882645] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21107b07-fdd5-47c8-9e3b-cb5c66b13af2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.889140] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1576.889140] env[63241]: value = "task-1820440" [ 1576.889140] env[63241]: _type = "Task" [ 1576.889140] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.897869] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820440, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.020155] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3aa35de2-c42d-4bf0-ab9d-c76258c3ed69 tempest-ServersV294TestFqdnHostnames-1109571372 tempest-ServersV294TestFqdnHostnames-1109571372-project-member] Lock "780f3eee-f6c7-4054-8e6e-a370f74dc405" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.588s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.061874] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820438, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.115904] env[63241]: INFO nova.compute.manager [-] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Took 1.28 seconds to deallocate network for instance. 
[ 1577.313432] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a628916e-ed52-4033-8da6-eb082e97f31b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.320515] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e32efcd-786a-4af4-8201-e1260fd09ca9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.350334] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bbc53d-0e33-4822-a9c7-1f97c12ca4a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.358124] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e502e52a-66d2-413d-acc4-80d7f7633b7c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.371024] env[63241]: DEBUG nova.compute.provider_tree [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1577.397647] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820440, 'name': Rename_Task, 'duration_secs': 0.162184} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.397917] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1577.398160] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46c400d5-8f4d-4232-8250-84a1fdf599cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.404689] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1577.404689] env[63241]: value = "task-1820441" [ 1577.404689] env[63241]: _type = "Task" [ 1577.404689] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.412181] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820441, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.561589] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820438, 'name': PowerOffVM_Task, 'duration_secs': 1.074844} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.561861] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1577.562096] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1577.562867] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f303f0c1-92c2-4ba0-baf2-4dd672a31403 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.569636] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1577.569863] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76452a21-2166-40e9-80b4-4e2e684bf6b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.625977] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.636844] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1577.637078] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1577.637267] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleting the datastore file [datastore1] 
34d138e6-90b3-4243-bf45-96ae856cd631 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1577.637527] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9fde9a3-104e-4f9e-8712-08f3cf575e37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.643513] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1577.643513] env[63241]: value = "task-1820443" [ 1577.643513] env[63241]: _type = "Task" [ 1577.643513] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.652986] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820443, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.874970] env[63241]: DEBUG nova.scheduler.client.report [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1577.916051] env[63241]: DEBUG oslo_vmware.api [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820441, 'name': PowerOnVM_Task, 'duration_secs': 0.488289} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.916784] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1577.916784] env[63241]: INFO nova.compute.manager [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Took 8.41 seconds to spawn the instance on the hypervisor. 
[ 1577.916784] env[63241]: DEBUG nova.compute.manager [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1577.917445] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd10bf9-0ae1-4aee-8df9-53c2f97e71a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.155851] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820443, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145359} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.156256] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1578.157549] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1578.157799] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1578.381148] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.896s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.382828] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 49.204s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.405124] env[63241]: INFO nova.scheduler.client.report [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Deleted allocations for instance bef91c1c-a418-4464-ae7b-883ffb7e9695 [ 1578.436279] env[63241]: INFO nova.compute.manager [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Took 59.48 seconds to build instance. 
[ 1578.888271] env[63241]: INFO nova.compute.claims [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1578.915613] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6242ac74-207c-4a60-aeee-7b205b799cea tempest-ListServersNegativeTestJSON-213631973 tempest-ListServersNegativeTestJSON-213631973-project-member] Lock "bef91c1c-a418-4464-ae7b-883ffb7e9695" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.765s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.938710] env[63241]: DEBUG oslo_concurrency.lockutils [None req-17539f1d-09fd-4fdf-842b-05282a012648 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.998s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.155208] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquiring lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.155576] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.155808] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquiring lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.155990] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.156203] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.160794] env[63241]: INFO nova.compute.manager [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Terminating instance [ 1579.164162] env[63241]: DEBUG nova.compute.manager [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1579.164262] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1579.165158] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b2a7db-b458-4f1e-8753-de5a978bc116 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.176172] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1579.176172] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8584e21-eaeb-4e82-8313-735aeee028f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.183451] env[63241]: DEBUG oslo_vmware.api [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1579.183451] env[63241]: value = "task-1820444" [ 1579.183451] env[63241]: _type = "Task" [ 1579.183451] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.194306] env[63241]: DEBUG oslo_vmware.api [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820444, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.219810] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1579.220098] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1579.220265] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1579.220456] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1579.220646] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1579.220802] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1579.221128] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1579.221904] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1579.221904] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1579.221904] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1579.221982] env[63241]: DEBUG nova.virt.hardware [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1579.222986] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a85bbc-537e-4a3d-b1bd-21b9605bf390 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.232893] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755b7ca7-2f2d-44e4-bffd-38406a5c2b5a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.249726] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:62:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8247f3da-5f40-4223-aad2-aa20105cd12d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1579.265577] env[63241]: DEBUG oslo.service.loopingcall [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1579.265577] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1579.265577] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb200fee-0742-4fbd-98ef-cde1705eee98 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.285935] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1579.285935] env[63241]: value = "task-1820445" [ 1579.285935] env[63241]: _type = "Task" [ 1579.285935] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.295286] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820445, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.395416] env[63241]: INFO nova.compute.resource_tracker [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating resource usage from migration dbead17e-aa42-4c50-ae83-6d0d9b03d450 [ 1579.692644] env[63241]: DEBUG oslo_vmware.api [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820444, 'name': PowerOffVM_Task, 'duration_secs': 0.190818} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.697175] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1579.697175] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1579.697175] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5baa3d2-a071-4c92-8e47-3845e4e88025 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.787114] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe35d91-1bf4-47f3-a1b9-f0bd164b422c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.796018] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1579.796018] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1579.796018] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Deleting the datastore file [datastore1] f4eb5e49-bae0-435c-93f0-15d6939f9e7c {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1579.798202] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c888703-2eac-4a18-a92f-9e718872e940 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.804729] env[63241]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc46a9dd-ebc9-4ead-8b9f-8a1a66fec99e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.808494] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820445, 'name': CreateVM_Task, 'duration_secs': 0.340988} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.808756] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1579.809821] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.810080] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.810479] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1579.810786] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7225680-bb01-48ca-86e4-5479bf1d763e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.849133] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca475218-072b-4d38-ba86-80ad1f68d8d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.853092] env[63241]: DEBUG oslo_vmware.api [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for the task: (returnval){ [ 1579.853092] env[63241]: value = "task-1820447" [ 1579.853092] env[63241]: _type = "Task" [ 1579.853092] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.855220] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1579.855220] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52279093-1c08-814a-c94e-3a8c43795e09" [ 1579.855220] env[63241]: _type = "Task" [ 1579.855220] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.867034] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbaa3323-b5b6-4bee-939d-3eee62458cd4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.877218] env[63241]: DEBUG oslo_vmware.api [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820447, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.882052] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52279093-1c08-814a-c94e-3a8c43795e09, 'name': SearchDatastore_Task, 'duration_secs': 0.010497} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.890739] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.891102] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1579.891430] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.891632] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.891881] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1579.892536] env[63241]: DEBUG nova.compute.provider_tree [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed in ProviderTree for provider: 
9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1579.893978] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16924ac9-3c78-4ec1-9a5a-0d4edfb37429 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.902570] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1579.902757] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1579.903754] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab3fb57e-bf03-436c-a2b5-062e64b0891e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.908931] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1579.908931] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f7c0c8-8166-bcc7-de26-8830cefdf010" [ 1579.908931] env[63241]: _type = "Task" [ 1579.908931] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.916834] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f7c0c8-8166-bcc7-de26-8830cefdf010, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.365216] env[63241]: DEBUG oslo_vmware.api [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Task: {'id': task-1820447, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174003} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.365666] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1580.365981] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1580.366391] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1580.366694] env[63241]: INFO nova.compute.manager [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1580.367956] env[63241]: DEBUG oslo.service.loopingcall [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1580.367956] env[63241]: DEBUG nova.compute.manager [-] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1580.367956] env[63241]: DEBUG nova.network.neutron [-] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1580.398536] env[63241]: DEBUG nova.scheduler.client.report [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1580.425091] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f7c0c8-8166-bcc7-de26-8830cefdf010, 'name': SearchDatastore_Task, 'duration_secs': 0.011148} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.426253] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6970728-823f-4dd0-8604-4b54b3f826ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.435580] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1580.435580] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e06057-c618-46e3-1910-6030f6b20e06" [ 1580.435580] env[63241]: _type = "Task" [ 1580.435580] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.445082] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e06057-c618-46e3-1910-6030f6b20e06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.739431] env[63241]: DEBUG nova.compute.manager [req-f62b7098-c984-4e39-ab42-2f88fda7e047 req-fde0c1c9-b211-4bdb-9f82-b1542add9ee6 service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Received event network-vif-deleted-a8a4fec8-526a-4a17-8012-e0b4703e0b50 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1580.739627] env[63241]: INFO nova.compute.manager [req-f62b7098-c984-4e39-ab42-2f88fda7e047 req-fde0c1c9-b211-4bdb-9f82-b1542add9ee6 service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Neutron deleted interface a8a4fec8-526a-4a17-8012-e0b4703e0b50; detaching it from the instance and deleting it from the info cache [ 1580.739794] env[63241]: DEBUG nova.network.neutron [req-f62b7098-c984-4e39-ab42-2f88fda7e047 req-fde0c1c9-b211-4bdb-9f82-b1542add9ee6 service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.909427] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.526s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.909635] env[63241]: INFO nova.compute.manager [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Migrating [ 1580.909915] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.910085] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 
tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.911750] env[63241]: DEBUG oslo_concurrency.lockutils [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 51.188s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.912049] env[63241]: DEBUG nova.objects.instance [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lazy-loading 'resources' on Instance uuid 3dfeaf57-2244-418e-a04a-ed4143e454d5 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1580.946161] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e06057-c618-46e3-1910-6030f6b20e06, 'name': SearchDatastore_Task, 'duration_secs': 0.029684} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.946415] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.946694] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 34d138e6-90b3-4243-bf45-96ae856cd631/34d138e6-90b3-4243-bf45-96ae856cd631.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1580.946947] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc0df011-91d1-468a-91eb-fdb3ea8697c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.953091] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1580.953091] env[63241]: value = "task-1820448" [ 1580.953091] env[63241]: _type = "Task" [ 1580.953091] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.961470] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820448, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.218598] env[63241]: DEBUG nova.network.neutron [-] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.245218] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54d2842c-77b0-4dbb-8f03-8384da5fce44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.254909] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d0d093-5ab9-47b3-afd7-8be7b584b1c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.295829] env[63241]: DEBUG nova.compute.manager [req-f62b7098-c984-4e39-ab42-2f88fda7e047 req-fde0c1c9-b211-4bdb-9f82-b1542add9ee6 service nova] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Detach interface failed, port_id=a8a4fec8-526a-4a17-8012-e0b4703e0b50, reason: Instance f4eb5e49-bae0-435c-93f0-15d6939f9e7c could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1581.422852] env[63241]: INFO nova.compute.rpcapi [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 1581.423328] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.473987] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820448, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.720382] env[63241]: INFO nova.compute.manager [-] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Took 1.35 seconds to deallocate network for instance. 
[ 1581.871439] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b802aff2-8aea-4d55-b7f3-e82223f4129d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.880208] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4a9698-dcc2-4b17-92f6-16ee59b397fc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.913754] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b8a17d-0a26-4fdd-a582-e9de8001ccb4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.921859] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a088bf2b-b2f3-4e89-9812-ab1a211dc744 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.936101] env[63241]: DEBUG nova.compute.provider_tree [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1581.944308] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.944308] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.944308] env[63241]: DEBUG nova.network.neutron [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1581.969491] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.748851} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.969777] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 34d138e6-90b3-4243-bf45-96ae856cd631/34d138e6-90b3-4243-bf45-96ae856cd631.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1581.970032] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1581.974126] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-10cee70f-de35-4f94-8357-b623db63270e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.978032] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1581.978032] env[63241]: value = "task-1820449" [ 1581.978032] env[63241]: _type = "Task" [ 1581.978032] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.986990] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820449, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.230272] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.441433] env[63241]: DEBUG nova.scheduler.client.report [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1582.488992] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101898} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.493039] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1582.493039] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a775e113-592b-406c-a086-626f6750847f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.517825] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 34d138e6-90b3-4243-bf45-96ae856cd631/34d138e6-90b3-4243-bf45-96ae856cd631.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1582.519546] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46360d2f-0db2-44d9-ac22-40ebbd76a5af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.545619] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1582.545619] env[63241]: value = "task-1820450" [ 1582.545619] env[63241]: _type = "Task" [ 1582.545619] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.559793] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820450, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.853673] env[63241]: DEBUG nova.network.neutron [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance_info_cache with network_info: [{"id": "0457ca89-42e2-485c-a958-773620259283", "address": "fa:16:3e:e2:4e:b4", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0457ca89-42", "ovs_interfaceid": "0457ca89-42e2-485c-a958-773620259283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.948479] env[63241]: DEBUG oslo_concurrency.lockutils [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.951398] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 51.389s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.952882] env[63241]: INFO nova.compute.claims [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1582.989789] env[63241]: INFO nova.scheduler.client.report [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Deleted allocations for instance 3dfeaf57-2244-418e-a04a-ed4143e454d5 [ 1583.059499] env[63241]: DEBUG 
oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820450, 'name': ReconfigVM_Task, 'duration_secs': 0.291743} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.059822] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 34d138e6-90b3-4243-bf45-96ae856cd631/34d138e6-90b3-4243-bf45-96ae856cd631.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1583.061113] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74c0dd27-042a-4b0f-b6a0-a5d15f43f778 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.068473] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1583.068473] env[63241]: value = "task-1820451" [ 1583.068473] env[63241]: _type = "Task" [ 1583.068473] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.077952] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820451, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.356794] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.503837] env[63241]: DEBUG oslo_concurrency.lockutils [None req-16131790-13a3-4674-a4a1-23848c50c828 tempest-ServerAddressesNegativeTestJSON-815809384 tempest-ServerAddressesNegativeTestJSON-815809384-project-member] Lock "3dfeaf57-2244-418e-a04a-ed4143e454d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 57.750s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.582242] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820451, 'name': Rename_Task, 'duration_secs': 0.211035} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.583038] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1583.583095] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c14b33e5-dc5e-4765-b298-25b05aed279a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.591949] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1583.591949] env[63241]: value = "task-1820452" [ 1583.591949] env[63241]: _type = "Task" [ 1583.591949] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.600373] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820452, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.711193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "20c7a1a1-4396-414f-a52c-06551722b6eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.711193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.103196] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820452, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.213489] env[63241]: DEBUG nova.compute.manager [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1584.463949] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff5396b-23e6-4f99-abce-ee22293e295c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.473477] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7110875-898e-4b69-81d9-2b97d6793451 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.506634] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb1a5dd-2187-4d80-a114-d6a7d346a77c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.514763] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db4ff95-709c-46ee-9ed3-1091ae4528a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.530064] env[63241]: DEBUG nova.compute.provider_tree [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1584.602796] env[63241]: DEBUG oslo_vmware.api [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820452, 'name': PowerOnVM_Task, 'duration_secs': 0.518207} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.603155] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1584.603437] env[63241]: DEBUG nova.compute.manager [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1584.604406] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3d4013-7807-4ab5-8560-c28757d62ee5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.743300] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.880796] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c716c0b2-c5d6-4b45-a54b-7ec9e8689a2e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.898976] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance '965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1585.034754] env[63241]: DEBUG nova.scheduler.client.report [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1585.128386] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.406424] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 
tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1585.406747] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c576b25-0e09-479b-a40b-731bd44a1b1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.415458] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1585.415458] env[63241]: value = "task-1820453" [ 1585.415458] env[63241]: _type = "Task" [ 1585.415458] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.426717] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820453, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.461771] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "fb5d60fa-fa13-44a1-8291-4645761a0c80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.462556] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.538931] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.539583] env[63241]: DEBUG nova.compute.manager [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1585.542391] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 51.138s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.542575] env[63241]: DEBUG nova.objects.instance [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1585.927180] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820453, 'name': PowerOffVM_Task, 'duration_secs': 0.260138} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.927492] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1585.927678] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance '965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1585.965089] env[63241]: DEBUG nova.compute.manager [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1586.049053] env[63241]: DEBUG nova.compute.utils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1586.051589] env[63241]: DEBUG nova.compute.manager [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1586.051849] env[63241]: DEBUG nova.network.neutron [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1586.121757] env[63241]: DEBUG nova.policy [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '763cbc8bbad74e9d861df4da2d112a54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4d6ab42fb064ba7a1aea224c8396c15', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1586.126912] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.127546] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.434670] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1586.435234] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1586.435392] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 
tempest-MigrationsAdminTest-801062833-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1586.435584] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1586.435723] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1586.435863] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1586.436136] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1586.436335] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1586.436538] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1586.437917] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1586.437917] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1586.442810] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e3ffaf2-9a87-4551-8bd1-d8a6a45f967f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.460106] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1586.460106] env[63241]: value = "task-1820454" [ 1586.460106] env[63241]: _type = "Task" [ 1586.460106] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.468643] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820454, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.498308] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.552363] env[63241]: DEBUG nova.compute.manager [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1586.556671] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5449477f-36d8-4f51-ab07-9448ce0f79d3 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.557834] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.977s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.562291] env[63241]: INFO nova.compute.claims [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1586.589163] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "34d138e6-90b3-4243-bf45-96ae856cd631" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.589163] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "34d138e6-90b3-4243-bf45-96ae856cd631" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.589512] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 
tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "34d138e6-90b3-4243-bf45-96ae856cd631-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.590227] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "34d138e6-90b3-4243-bf45-96ae856cd631-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.590227] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "34d138e6-90b3-4243-bf45-96ae856cd631-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.595966] env[63241]: INFO nova.compute.manager [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Terminating instance [ 1586.598840] env[63241]: DEBUG nova.compute.manager [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1586.599238] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1586.600963] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d31a06-52cb-4dda-b03f-98f6e9f83d76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.620817] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1586.621279] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89beeb55-e444-48b8-9a59-247b1cd3d957 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.630563] env[63241]: DEBUG oslo_vmware.api [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1586.630563] env[63241]: value = "task-1820455" [ 1586.630563] env[63241]: _type = "Task" [ 1586.630563] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.639713] env[63241]: DEBUG oslo_vmware.api [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820455, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.697024] env[63241]: DEBUG nova.network.neutron [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Successfully created port: 2dcf61ef-f6c5-4404-93d1-eb41df5f55f5 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1586.975292] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820454, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.142983] env[63241]: DEBUG oslo_vmware.api [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820455, 'name': PowerOffVM_Task, 'duration_secs': 0.418188} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.143402] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1587.143635] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1587.143941] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6cde5286-c40a-4b4e-8edf-72fff7d48fe9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.225562] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1587.225562] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1587.225713] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 
tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleting the datastore file [datastore1] 34d138e6-90b3-4243-bf45-96ae856cd631 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1587.225940] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-333624dc-6ae9-4752-aef1-62e8b545bcd5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.232827] env[63241]: DEBUG oslo_vmware.api [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1587.232827] env[63241]: value = "task-1820457" [ 1587.232827] env[63241]: _type = "Task" [ 1587.232827] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.240638] env[63241]: DEBUG oslo_vmware.api [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.331499] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.331775] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.471045] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820454, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.568120] env[63241]: DEBUG nova.compute.manager [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1587.593818] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1587.594083] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1587.594244] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1587.594451] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1587.594619] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1587.594769] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1587.594989] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1587.595261] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1587.595478] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1587.595646] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1587.595835] env[63241]: DEBUG nova.virt.hardware [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1587.596772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20b67bd-772d-4ed2-92f0-323bb652d58f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.606795] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261c6480-1751-424f-b106-aff6818416df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.741788] env[63241]: DEBUG oslo_vmware.api [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145584} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.744090] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1587.744285] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1587.744464] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1587.744638] env[63241]: INFO nova.compute.manager [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1587.744888] env[63241]: DEBUG oslo.service.loopingcall [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1587.745275] env[63241]: DEBUG nova.compute.manager [-] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1587.745344] env[63241]: DEBUG nova.network.neutron [-] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1587.929055] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91c6c35-64d5-4354-8b2a-cdbe0cbac96d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.937191] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff85ff6-22b7-4131-b101-74b5914f2e17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.972035] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342f63a5-6dba-4637-8feb-4ea1d3bfd686 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.982237] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52782baf-3df7-43fc-9fc7-06b64f600af0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.986054] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820454, 'name': ReconfigVM_Task, 'duration_secs': 1.193777} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.986390] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance '965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1587.999729] env[63241]: DEBUG nova.compute.provider_tree [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1588.015736] env[63241]: DEBUG nova.compute.manager [req-0ca6955e-e156-4bc6-a1a8-b5e269b6a988 req-b2ea3e35-3005-44bc-a0d1-df147b41c0c4 service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Received event network-vif-deleted-8247f3da-5f40-4223-aad2-aa20105cd12d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1588.015948] env[63241]: INFO nova.compute.manager [req-0ca6955e-e156-4bc6-a1a8-b5e269b6a988 req-b2ea3e35-3005-44bc-a0d1-df147b41c0c4 service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Neutron deleted interface 8247f3da-5f40-4223-aad2-aa20105cd12d; detaching it from the instance and deleting it from the info cache [ 1588.016175] env[63241]: DEBUG nova.network.neutron [req-0ca6955e-e156-4bc6-a1a8-b5e269b6a988 req-b2ea3e35-3005-44bc-a0d1-df147b41c0c4 service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.297234] env[63241]: DEBUG nova.compute.manager [req-549931a1-8f6f-4c8f-b725-2ed270706c28 req-ded8f7ff-b86e-4c3e-babd-f5d599ac60f1 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Received event network-vif-plugged-2dcf61ef-f6c5-4404-93d1-eb41df5f55f5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1588.297464] env[63241]: DEBUG oslo_concurrency.lockutils [req-549931a1-8f6f-4c8f-b725-2ed270706c28 req-ded8f7ff-b86e-4c3e-babd-f5d599ac60f1 service nova] Acquiring lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.297702] env[63241]: DEBUG oslo_concurrency.lockutils [req-549931a1-8f6f-4c8f-b725-2ed270706c28 req-ded8f7ff-b86e-4c3e-babd-f5d599ac60f1 service nova] Lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.297893] env[63241]: DEBUG oslo_concurrency.lockutils [req-549931a1-8f6f-4c8f-b725-2ed270706c28 
req-ded8f7ff-b86e-4c3e-babd-f5d599ac60f1 service nova] Lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.298185] env[63241]: DEBUG nova.compute.manager [req-549931a1-8f6f-4c8f-b725-2ed270706c28 req-ded8f7ff-b86e-4c3e-babd-f5d599ac60f1 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] No waiting events found dispatching network-vif-plugged-2dcf61ef-f6c5-4404-93d1-eb41df5f55f5 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1588.298494] env[63241]: WARNING nova.compute.manager [req-549931a1-8f6f-4c8f-b725-2ed270706c28 req-ded8f7ff-b86e-4c3e-babd-f5d599ac60f1 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Received unexpected event network-vif-plugged-2dcf61ef-f6c5-4404-93d1-eb41df5f55f5 for instance with vm_state building and task_state spawning. [ 1588.420801] env[63241]: DEBUG nova.network.neutron [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Successfully updated port: 2dcf61ef-f6c5-4404-93d1-eb41df5f55f5 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1588.494563] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1588.494831] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1588.494989] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1588.495188] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1588.495355] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1588.495479] env[63241]: 
DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1588.495677] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1588.495834] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1588.495997] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1588.496178] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1588.496354] env[63241]: DEBUG nova.virt.hardware [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1588.501734] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Reconfiguring VM instance instance-00000031 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1588.502304] env[63241]: DEBUG nova.network.neutron [-] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.505595] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f6c786f-14c3-4951-b31f-3db69322884c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.519220] env[63241]: INFO nova.compute.manager [-] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Took 0.77 seconds to deallocate network for instance. 
[ 1588.520884] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d3eb955-78d9-422b-a16e-7b588fbb7f4b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.529334] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1588.529334] env[63241]: value = "task-1820458" [ 1588.529334] env[63241]: _type = "Task" [ 1588.529334] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.534689] env[63241]: ERROR nova.scheduler.client.report [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [req-333768c9-a093-4044-b1d9-7ae39975bfb3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-333768c9-a093-4044-b1d9-7ae39975bfb3"}]} [ 1588.538457] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b25086-aac4-4ed6-9957-e577f47a86cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.558729] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820458, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.572506] env[63241]: DEBUG nova.compute.manager [req-0ca6955e-e156-4bc6-a1a8-b5e269b6a988 req-b2ea3e35-3005-44bc-a0d1-df147b41c0c4 service nova] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Detach interface failed, port_id=8247f3da-5f40-4223-aad2-aa20105cd12d, reason: Instance 34d138e6-90b3-4243-bf45-96ae856cd631 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1588.573729] env[63241]: DEBUG nova.scheduler.client.report [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1588.587834] env[63241]: DEBUG nova.scheduler.client.report [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1588.588062] env[63241]: DEBUG nova.compute.provider_tree [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1588.598281] env[63241]: DEBUG nova.scheduler.client.report [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1588.615287] env[63241]: DEBUG nova.scheduler.client.report [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1588.923339] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquiring lock "refresh_cache-9b61cee5-65b4-499e-80fd-c6ce6f79dd13" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.923448] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquired lock 
"refresh_cache-9b61cee5-65b4-499e-80fd-c6ce6f79dd13" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.923625] env[63241]: DEBUG nova.network.neutron [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1588.952470] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b328d6ad-337c-4b7a-89b6-eaee909231a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.960796] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43aa039-fa1f-476f-8899-7c4dea4a3766 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.994226] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a81024b-2e38-41ad-8d22-5b8ab0831f6b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.002208] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac260c6b-beda-4bb6-8ec9-387f7ce1b732 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.016943] env[63241]: DEBUG nova.compute.provider_tree [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1589.027053] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.040379] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820458, 'name': ReconfigVM_Task, 'duration_secs': 0.161609} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.040545] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Reconfigured VM instance instance-00000031 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1589.041464] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb0db8d-434d-4f53-bc7f-447097faeefc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.064064] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce/965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1589.064624] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15db11fb-0f20-4ba9-8770-4f474f0fed4f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.082845] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1589.082845] env[63241]: value = "task-1820459" [ 1589.082845] env[63241]: _type = "Task" [ 1589.082845] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.091208] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820459, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.454294] env[63241]: DEBUG nova.network.neutron [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1589.549656] env[63241]: DEBUG nova.scheduler.client.report [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 88 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1589.550498] env[63241]: DEBUG nova.compute.provider_tree [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 88 to 89 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1589.550498] env[63241]: DEBUG nova.compute.provider_tree [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1589.592470] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820459, 'name': ReconfigVM_Task, 'duration_secs': 0.26869} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.592765] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce/965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1589.592993] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance '965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1589.614685] env[63241]: DEBUG nova.network.neutron [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Updating instance_info_cache with network_info: [{"id": "2dcf61ef-f6c5-4404-93d1-eb41df5f55f5", "address": "fa:16:3e:03:3e:b2", "network": {"id": "dbf148f3-396b-4dca-b479-3acc0f4af87e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1962302854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4d6ab42fb064ba7a1aea224c8396c15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dcf61ef-f6", "ovs_interfaceid": "2dcf61ef-f6c5-4404-93d1-eb41df5f55f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.055774] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.498s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.056459] env[63241]: DEBUG nova.compute.manager [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1590.059426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 51.556s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.059510] env[63241]: DEBUG nova.objects.instance [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lazy-loading 'resources' on Instance uuid e753da08-d4a5-4f17-85c8-154e843798c9 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1590.099033] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f69dccd-3b64-440d-bd73-88c2e5336082 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.120519] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Releasing lock "refresh_cache-9b61cee5-65b4-499e-80fd-c6ce6f79dd13" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.120882] env[63241]: DEBUG nova.compute.manager [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Instance network_info: |[{"id": "2dcf61ef-f6c5-4404-93d1-eb41df5f55f5", "address": "fa:16:3e:03:3e:b2", "network": {"id": "dbf148f3-396b-4dca-b479-3acc0f4af87e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1962302854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4d6ab42fb064ba7a1aea224c8396c15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dcf61ef-f6", "ovs_interfaceid": "2dcf61ef-f6c5-4404-93d1-eb41df5f55f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1590.121579] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:3e:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2dcf61ef-f6c5-4404-93d1-eb41df5f55f5', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1590.129370] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Creating folder: Project (d4d6ab42fb064ba7a1aea224c8396c15). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1590.130419] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f219b9ae-f7df-48d2-a488-9800eee2a00b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.133230] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f807875f-5b73-41b9-9d4c-eccce08945e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.151739] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance '965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1590.156215] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Created folder: Project (d4d6ab42fb064ba7a1aea224c8396c15) in parent group-v376927. [ 1590.156441] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Creating folder: Instances. Parent ref: group-v377093. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1590.156889] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-592b86f6-3bba-49c1-8519-09ac3e6a8085 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.165287] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Created folder: Instances in parent group-v377093. [ 1590.165536] env[63241]: DEBUG oslo.service.loopingcall [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1590.165733] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1590.165926] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0d38217-09c6-4ae9-891d-61dd78be37e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.186346] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1590.186346] env[63241]: value = "task-1820462" [ 1590.186346] env[63241]: _type = "Task" [ 1590.186346] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.193565] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820462, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.323283] env[63241]: DEBUG nova.compute.manager [req-89bb233c-de08-4c4d-933a-fe9160b25788 req-e60107db-2815-45be-8001-f8020e6a0d53 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Received event network-changed-2dcf61ef-f6c5-4404-93d1-eb41df5f55f5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1590.323511] env[63241]: DEBUG nova.compute.manager [req-89bb233c-de08-4c4d-933a-fe9160b25788 req-e60107db-2815-45be-8001-f8020e6a0d53 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Refreshing instance network info cache due to event network-changed-2dcf61ef-f6c5-4404-93d1-eb41df5f55f5. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1590.323711] env[63241]: DEBUG oslo_concurrency.lockutils [req-89bb233c-de08-4c4d-933a-fe9160b25788 req-e60107db-2815-45be-8001-f8020e6a0d53 service nova] Acquiring lock "refresh_cache-9b61cee5-65b4-499e-80fd-c6ce6f79dd13" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.323853] env[63241]: DEBUG oslo_concurrency.lockutils [req-89bb233c-de08-4c4d-933a-fe9160b25788 req-e60107db-2815-45be-8001-f8020e6a0d53 service nova] Acquired lock "refresh_cache-9b61cee5-65b4-499e-80fd-c6ce6f79dd13" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.324018] env[63241]: DEBUG nova.network.neutron [req-89bb233c-de08-4c4d-933a-fe9160b25788 req-e60107db-2815-45be-8001-f8020e6a0d53 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Refreshing network info cache for port 2dcf61ef-f6c5-4404-93d1-eb41df5f55f5 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1590.563052] env[63241]: DEBUG nova.compute.utils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1590.567564] env[63241]: DEBUG nova.compute.manager [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1590.567564] env[63241]: DEBUG nova.network.neutron [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1590.612357] env[63241]: DEBUG nova.policy [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78657a2bc34d4bb9922678ed287530f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18094134f49b4e84b83e97631bc22903', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1590.694062] env[63241]: DEBUG nova.network.neutron [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Port 0457ca89-42e2-485c-a958-773620259283 binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1590.698647] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820462, 'name': CreateVM_Task, 'duration_secs': 0.34985} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.698809] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1590.699486] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.699677] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.700252] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1590.700748] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55c6e5d4-0b15-495f-95ea-20d9db0ac5a4 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.707928] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1590.707928] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523b6014-fd6f-c425-2154-4587400136d4" [ 1590.707928] env[63241]: _type = "Task" [ 1590.707928] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.716250] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523b6014-fd6f-c425-2154-4587400136d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.883987] env[63241]: DEBUG nova.network.neutron [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Successfully created port: b4bd3b82-c81b-4cde-a17e-1b6f2f52add3 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1590.920159] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9831afe2-3758-4e21-992f-841c33b94909 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.928418] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43389d2-d9ba-457b-ab21-707fac5ced93 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.961693] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026f4fc8-f532-46fd-b704-6a66074ff985 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.970571] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d120ca2f-2477-44fd-a0d0-d7b57b1feb36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.984404] env[63241]: DEBUG nova.compute.provider_tree [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1591.056731] env[63241]: DEBUG nova.network.neutron [req-89bb233c-de08-4c4d-933a-fe9160b25788 req-e60107db-2815-45be-8001-f8020e6a0d53 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Updated VIF entry in instance network info cache for port 2dcf61ef-f6c5-4404-93d1-eb41df5f55f5. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1591.057059] env[63241]: DEBUG nova.network.neutron [req-89bb233c-de08-4c4d-933a-fe9160b25788 req-e60107db-2815-45be-8001-f8020e6a0d53 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Updating instance_info_cache with network_info: [{"id": "2dcf61ef-f6c5-4404-93d1-eb41df5f55f5", "address": "fa:16:3e:03:3e:b2", "network": {"id": "dbf148f3-396b-4dca-b479-3acc0f4af87e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1962302854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4d6ab42fb064ba7a1aea224c8396c15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dcf61ef-f6", "ovs_interfaceid": "2dcf61ef-f6c5-4404-93d1-eb41df5f55f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.067492] env[63241]: DEBUG nova.compute.manager [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1591.218055] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523b6014-fd6f-c425-2154-4587400136d4, 'name': SearchDatastore_Task, 'duration_secs': 0.010186} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.218369] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.218596] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1591.218825] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.218969] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.219164] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1591.219484] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06ae216a-1722-4551-a12e-677a260515ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.227913] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1591.227913] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1591.229030] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aef2d115-c017-4765-9db7-c84d1e053268 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.233983] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1591.233983] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5230dd0d-8b76-fed3-4480-0ca334aa5599" [ 1591.233983] env[63241]: _type = "Task" [ 1591.233983] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.241787] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5230dd0d-8b76-fed3-4480-0ca334aa5599, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.490776] env[63241]: DEBUG nova.scheduler.client.report [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1591.559679] env[63241]: DEBUG oslo_concurrency.lockutils [req-89bb233c-de08-4c4d-933a-fe9160b25788 req-e60107db-2815-45be-8001-f8020e6a0d53 service nova] Releasing lock "refresh_cache-9b61cee5-65b4-499e-80fd-c6ce6f79dd13" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.721777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.721777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.722083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 
tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.746421] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5230dd0d-8b76-fed3-4480-0ca334aa5599, 'name': SearchDatastore_Task, 'duration_secs': 0.00811} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.747253] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a9558a4-35b7-47f4-8492-0a45d9663502 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.752804] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1591.752804] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520e13e8-7b36-4886-3b69-23ad9848d1bf" [ 1591.752804] env[63241]: _type = "Task" [ 1591.752804] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.762556] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520e13e8-7b36-4886-3b69-23ad9848d1bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.997045] env[63241]: DEBUG oslo_concurrency.lockutils [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.937s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.998734] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 51.090s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.998965] env[63241]: DEBUG nova.objects.instance [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lazy-loading 'resources' on Instance uuid 0115b03b-c828-4e8b-a4d2-c98f8ca69c66 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1592.019926] env[63241]: INFO nova.scheduler.client.report [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted allocations for instance e753da08-d4a5-4f17-85c8-154e843798c9 [ 1592.076830] env[63241]: DEBUG nova.compute.manager [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1592.101859] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1592.102313] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1592.102574] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1592.103276] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1592.103276] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1592.103276] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1592.103421] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1592.103522] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1592.103692] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1592.103855] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1592.104039] env[63241]: DEBUG nova.virt.hardware [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1592.104901] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbc54c4-15a7-4f1d-8344-59ac35bbed06 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.112872] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4390cdb8-adf6-4d25-a9ba-5ffbe3a9267a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.264047] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520e13e8-7b36-4886-3b69-23ad9848d1bf, 'name': SearchDatastore_Task, 'duration_secs': 0.00985} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.264683] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.264683] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 9b61cee5-65b4-499e-80fd-c6ce6f79dd13/9b61cee5-65b4-499e-80fd-c6ce6f79dd13.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1592.265449] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a69669cf-f09e-49bc-af2c-35419281dcf6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.271896] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1592.271896] env[63241]: value = "task-1820463" [ 1592.271896] env[63241]: _type = "Task" [ 1592.271896] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.280310] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820463, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.427600] env[63241]: DEBUG nova.compute.manager [req-c8c5d241-4c36-449b-bffb-53124aad55b2 req-81a9afaf-251d-45b2-bf48-e2829487f4c2 service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Received event network-vif-plugged-b4bd3b82-c81b-4cde-a17e-1b6f2f52add3 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1592.427823] env[63241]: DEBUG oslo_concurrency.lockutils [req-c8c5d241-4c36-449b-bffb-53124aad55b2 req-81a9afaf-251d-45b2-bf48-e2829487f4c2 service nova] Acquiring lock "9e6ca606-383d-42f0-aea4-edecde33c1a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.428000] env[63241]: DEBUG oslo_concurrency.lockutils [req-c8c5d241-4c36-449b-bffb-53124aad55b2 req-81a9afaf-251d-45b2-bf48-e2829487f4c2 service nova] Lock "9e6ca606-383d-42f0-aea4-edecde33c1a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.428188] env[63241]: DEBUG oslo_concurrency.lockutils [req-c8c5d241-4c36-449b-bffb-53124aad55b2 req-81a9afaf-251d-45b2-bf48-e2829487f4c2 service nova] Lock "9e6ca606-383d-42f0-aea4-edecde33c1a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.428353] env[63241]: DEBUG nova.compute.manager [req-c8c5d241-4c36-449b-bffb-53124aad55b2 req-81a9afaf-251d-45b2-bf48-e2829487f4c2 service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] No waiting events found dispatching network-vif-plugged-b4bd3b82-c81b-4cde-a17e-1b6f2f52add3 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1592.428516] env[63241]: WARNING nova.compute.manager [req-c8c5d241-4c36-449b-bffb-53124aad55b2 req-81a9afaf-251d-45b2-bf48-e2829487f4c2 service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Received unexpected event network-vif-plugged-b4bd3b82-c81b-4cde-a17e-1b6f2f52add3 for instance with vm_state building and task_state spawning. 
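Editor's note: the repeated "Waiting for the task ... progress is N% ... completed successfully" entries in this section come from oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). The following is a minimal, self-contained Python sketch of that polling pattern only, not the oslo.vmware implementation; FakeTask, its poll() method, and the interval value are hypothetical stand-ins for a real vCenter task handle such as CreateVM_Task or CopyVirtualDisk_Task.

# Illustrative sketch of the poll-until-done pattern seen in the log above.
# All names here are hypothetical; this is not oslo.vmware code.
import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task handle (e.g. CreateVM_Task)."""

    def __init__(self, name, steps=3):
        self.name = name
        self._steps = steps
        self._polls = 0

    def poll(self):
        """Return (state, progress_percent) for the current poll."""
        self._polls += 1
        if self._polls >= self._steps:
            return "success", 100
        return "running", int(100 * self._polls / self._steps)


def wait_for_task(task, interval=0.5):
    """Poll `task` until it finishes, logging progress like _poll_task does."""
    while True:
        state, progress = task.poll()
        print(f"Task {task.name!r} progress is {progress}%.")
        if state == "success":
            print(f"Task {task.name!r} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.name!r} failed")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("CreateVM_Task"))
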
[ 1592.530137] env[63241]: DEBUG oslo_concurrency.lockutils [None req-998f15a8-2716-4a9b-b0c5-7084890e8f3a tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "e753da08-d4a5-4f17-85c8-154e843798c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.977s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.561500] env[63241]: DEBUG nova.network.neutron [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Successfully updated port: b4bd3b82-c81b-4cde-a17e-1b6f2f52add3 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1592.783506] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820463, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473254} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.783824] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 9b61cee5-65b4-499e-80fd-c6ce6f79dd13/9b61cee5-65b4-499e-80fd-c6ce6f79dd13.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1592.785115] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1592.785398] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ee5666f-4c9f-49f4-bfb8-4a0e6cc36beb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.794179] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1592.794179] env[63241]: value = "task-1820464" [ 1592.794179] env[63241]: _type = "Task" [ 1592.794179] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.808898] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820464, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.815789] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.815856] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.816058] env[63241]: DEBUG nova.network.neutron [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1592.915358] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff67198d-6868-4145-a113-d47f98c59034 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.925869] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df35954-d58e-4a19-a66f-4d4ed9271084 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.955321] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce349c9-bd06-4ecd-904e-c2b07995cb3f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.963610] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7be243c-51fc-4c65-8bc0-22284a015e33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.981109] env[63241]: DEBUG nova.compute.provider_tree [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1593.064218] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "refresh_cache-9e6ca606-383d-42f0-aea4-edecde33c1a4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.064366] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Acquired lock "refresh_cache-9e6ca606-383d-42f0-aea4-edecde33c1a4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.064517] env[63241]: DEBUG nova.network.neutron [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1593.131075] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.131477] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.131750] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.131989] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.132220] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.134700] env[63241]: INFO nova.compute.manager [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Terminating instance [ 1593.136539] env[63241]: DEBUG nova.compute.manager [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1593.137083] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1593.137888] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d2a910-946e-4c74-9f3f-4d3827cb348c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.147018] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1593.147018] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61483ea1-5c3c-4f41-948b-dfa40f72dac0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.153178] env[63241]: DEBUG oslo_vmware.api [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1593.153178] env[63241]: value = "task-1820465" [ 1593.153178] env[63241]: _type = "Task" [ 1593.153178] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.162818] env[63241]: DEBUG oslo_vmware.api [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820465, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.305293] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820464, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06882} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.305583] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1593.306493] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83803ee-d3ff-43b2-b517-fe1d2fd90285 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.333187] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 9b61cee5-65b4-499e-80fd-c6ce6f79dd13/9b61cee5-65b4-499e-80fd-c6ce6f79dd13.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1593.333846] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d8ccdef-a588-47cc-8720-e2326bad98c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.353629] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1593.353629] env[63241]: value = "task-1820466" [ 1593.353629] env[63241]: _type = "Task" [ 1593.353629] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.361846] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820466, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.514490] env[63241]: DEBUG nova.scheduler.client.report [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1593.514798] env[63241]: DEBUG nova.compute.provider_tree [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 89 to 90 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1593.515017] env[63241]: DEBUG nova.compute.provider_tree [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1593.592212] env[63241]: DEBUG nova.network.neutron [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance_info_cache with network_info: [{"id": "0457ca89-42e2-485c-a958-773620259283", "address": "fa:16:3e:e2:4e:b4", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0457ca89-42", "ovs_interfaceid": "0457ca89-42e2-485c-a958-773620259283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1593.596864] env[63241]: DEBUG nova.network.neutron [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1593.667752] env[63241]: DEBUG oslo_vmware.api [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820465, 'name': PowerOffVM_Task, 'duration_secs': 0.203265} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.668651] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1593.668885] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1593.669134] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6228d7f6-2b9b-4a9e-b719-c2c0d50d14fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.736955] env[63241]: DEBUG nova.network.neutron [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Updating instance_info_cache with network_info: [{"id": "b4bd3b82-c81b-4cde-a17e-1b6f2f52add3", "address": "fa:16:3e:75:03:17", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4bd3b82-c8", "ovs_interfaceid": "b4bd3b82-c81b-4cde-a17e-1b6f2f52add3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.777443] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 
ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1593.777725] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1593.777954] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleting the datastore file [datastore1] ac35fa03-aeca-4e18-84ab-cb80bb4cabfd {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1593.778261] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dd5441c-2de3-4e02-b6ad-6d491677258d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.785018] env[63241]: DEBUG oslo_vmware.api [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1593.785018] env[63241]: value = "task-1820468" [ 1593.785018] env[63241]: _type = "Task" [ 1593.785018] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.862842] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820466, 'name': ReconfigVM_Task, 'duration_secs': 0.349192} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.863152] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 9b61cee5-65b4-499e-80fd-c6ce6f79dd13/9b61cee5-65b4-499e-80fd-c6ce6f79dd13.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1593.863794] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6153b897-b2b5-4d63-b5ff-cd5f78ee5173 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.869898] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1593.869898] env[63241]: value = "task-1820469" [ 1593.869898] env[63241]: _type = "Task" [ 1593.869898] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.878031] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820469, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.020262] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.021s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.022742] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.371s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.022996] env[63241]: DEBUG nova.objects.instance [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lazy-loading 'resources' on Instance uuid c3c278a8-0513-4a7f-881e-b71c70206860 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1594.044398] env[63241]: INFO nova.scheduler.client.report [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Deleted allocations for instance 0115b03b-c828-4e8b-a4d2-c98f8ca69c66 [ 1594.095308] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.239700] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "refresh_cache-9e6ca606-383d-42f0-aea4-edecde33c1a4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.240145] env[63241]: DEBUG nova.compute.manager [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Instance network_info: |[{"id": "b4bd3b82-c81b-4cde-a17e-1b6f2f52add3", "address": "fa:16:3e:75:03:17", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4bd3b82-c8", "ovs_interfaceid": "b4bd3b82-c81b-4cde-a17e-1b6f2f52add3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1594.240988] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:03:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4bd3b82-c81b-4cde-a17e-1b6f2f52add3', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1594.253817] env[63241]: DEBUG oslo.service.loopingcall [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1594.254136] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1594.254428] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc01db7f-ba60-4a79-a363-9b8e7fac77a2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.278852] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1594.278852] env[63241]: value = "task-1820470" [ 1594.278852] env[63241]: _type = "Task" [ 1594.278852] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.287708] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820470, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.295463] env[63241]: DEBUG oslo_vmware.api [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820468, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.382177] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820469, 'name': Rename_Task, 'duration_secs': 0.2076} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.382468] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1594.382840] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6af6604e-bd53-4f50-9d1f-68d540adab7e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.389419] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1594.389419] env[63241]: value = "task-1820471" [ 1594.389419] env[63241]: _type = "Task" [ 1594.389419] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.398097] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820471, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.493943] env[63241]: DEBUG nova.compute.manager [req-6dfa19f6-aa3b-4aab-aec3-975130e2ab85 req-50f7455e-3614-4cc4-a7f1-1503c7f3987c service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Received event network-changed-b4bd3b82-c81b-4cde-a17e-1b6f2f52add3 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1594.494166] env[63241]: DEBUG nova.compute.manager [req-6dfa19f6-aa3b-4aab-aec3-975130e2ab85 req-50f7455e-3614-4cc4-a7f1-1503c7f3987c service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Refreshing instance network info cache due to event network-changed-b4bd3b82-c81b-4cde-a17e-1b6f2f52add3. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1594.494383] env[63241]: DEBUG oslo_concurrency.lockutils [req-6dfa19f6-aa3b-4aab-aec3-975130e2ab85 req-50f7455e-3614-4cc4-a7f1-1503c7f3987c service nova] Acquiring lock "refresh_cache-9e6ca606-383d-42f0-aea4-edecde33c1a4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.494527] env[63241]: DEBUG oslo_concurrency.lockutils [req-6dfa19f6-aa3b-4aab-aec3-975130e2ab85 req-50f7455e-3614-4cc4-a7f1-1503c7f3987c service nova] Acquired lock "refresh_cache-9e6ca606-383d-42f0-aea4-edecde33c1a4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.494718] env[63241]: DEBUG nova.network.neutron [req-6dfa19f6-aa3b-4aab-aec3-975130e2ab85 req-50f7455e-3614-4cc4-a7f1-1503c7f3987c service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Refreshing network info cache for port b4bd3b82-c81b-4cde-a17e-1b6f2f52add3 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1594.555351] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9ce7a94-4ab0-485d-a4c9-6fdf8a46d109 tempest-ListImageFiltersTestJSON-1973084216 tempest-ListImageFiltersTestJSON-1973084216-project-member] Lock "0115b03b-c828-4e8b-a4d2-c98f8ca69c66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 57.073s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.619555] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6cd9d8-b284-434f-a7ee-31df94ab59ad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.654808] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24f00ea-c43b-4c9b-b7f7-df19b5b5aba0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.663595] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance '965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1594.794496] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820470, 'name': CreateVM_Task, 'duration_secs': 0.430375} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.795164] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1594.795990] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.796325] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.796823] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1594.803471] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bdea3ba-313f-450a-81b2-e0e584977083 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.805389] env[63241]: DEBUG oslo_vmware.api [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820468, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.601049} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.805883] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1594.806091] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1594.806276] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1594.806454] env[63241]: INFO nova.compute.manager [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Took 1.67 seconds to destroy the instance on the hypervisor. 
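Note: the entries above repeatedly show the driver submitting a vCenter task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, CreateVM_Task, Rename_Task) and then logging "Waiting for the task ... to complete", "progress is N%", and finally "completed successfully". The sketch below is only an illustration of that polling pattern, not the oslo.vmware implementation; `get_task_info` and `poll_interval` are assumed names.

```python
# Illustrative sketch of the task-polling pattern reflected in the log entries
# above. NOT the oslo.vmware code; get_task_info and poll_interval are assumed.
import time


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task reference until it reaches a terminal state."""
    while True:
        info = get_task_info(task_ref)          # one property read per poll
        if info["state"] == "success":          # logged as "completed successfully"
            return info.get("result")
        if info["state"] == "error":            # would surface as an ERROR entry
            raise RuntimeError(info.get("error"))
        # "progress is N%" lines correspond to these intermediate polls
        time.sleep(poll_interval)
```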
[ 1594.806702] env[63241]: DEBUG oslo.service.loopingcall [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1594.807592] env[63241]: DEBUG nova.compute.manager [-] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1594.807592] env[63241]: DEBUG nova.network.neutron [-] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1594.811135] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1594.811135] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f85012-e9b7-3c09-38ee-e8561720afa9" [ 1594.811135] env[63241]: _type = "Task" [ 1594.811135] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.820647] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f85012-e9b7-3c09-38ee-e8561720afa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.899721] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820471, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.980998] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f66f2d8-58b2-4a9d-ade0-1b11392d6176 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.988830] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61d43e2-17d7-469a-b796-a587243596aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.022814] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eee823f-64d0-4f5e-8f88-4947042bbcae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.031108] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cc6013-dd66-47a3-9432-546a86e65819 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.046776] env[63241]: DEBUG nova.compute.provider_tree [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1595.171736] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1595.172055] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc3b970e-9740-4bfd-8445-4c0865ab1b21 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.181204] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1595.181204] env[63241]: value = "task-1820472" [ 1595.181204] env[63241]: _type = "Task" [ 1595.181204] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.188868] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820472, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.323358] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f85012-e9b7-3c09-38ee-e8561720afa9, 'name': SearchDatastore_Task, 'duration_secs': 0.010512} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.323787] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.324075] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1595.325077] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.325252] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.325475] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1595.327238] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02c888ca-9a6f-4ef2-ad47-e4c94c14f669 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.335731] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1595.335919] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1595.336707] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c45b9063-912d-40f2-bc30-742ad5b1953e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.342685] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1595.342685] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529f89e9-da4c-9748-50b5-d6b651b0bb10" [ 1595.342685] env[63241]: _type = "Task" [ 1595.342685] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.350716] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529f89e9-da4c-9748-50b5-d6b651b0bb10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.396423] env[63241]: DEBUG nova.compute.manager [req-3b8185a0-4b10-4b75-8789-efd2db4736a5 req-684dccc8-b8ef-4b45-93ba-8451d83b229d service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Received event network-vif-deleted-ab769350-6899-4d15-94f5-ede018f0f344 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1595.396570] env[63241]: INFO nova.compute.manager [req-3b8185a0-4b10-4b75-8789-efd2db4736a5 req-684dccc8-b8ef-4b45-93ba-8451d83b229d service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Neutron deleted interface ab769350-6899-4d15-94f5-ede018f0f344; detaching it from the instance and deleting it from the info cache [ 1595.396748] env[63241]: DEBUG nova.network.neutron [req-3b8185a0-4b10-4b75-8789-efd2db4736a5 req-684dccc8-b8ef-4b45-93ba-8451d83b229d service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.404192] env[63241]: DEBUG oslo_vmware.api [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820471, 'name': PowerOnVM_Task, 'duration_secs': 0.532131} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.404496] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1595.404695] env[63241]: INFO nova.compute.manager [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Took 7.84 seconds to spawn the instance on the hypervisor. 
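Note: the surrounding entries serialize access to the shared image cache path "[datastore1] devstack-image-cache_base/e128f8d9-...": a named lock is acquired, SearchDatastore_Task checks for the cached disk, and FileManager.MakeDirectory creates the cache folder only if it is missing. The following is a generic check-then-populate sketch of that pattern under a per-path lock; it is not the oslo.concurrency or nova.virt.vmwareapi code, and `_LOCKS`, `search_datastore`, and `mkdir` are assumed helpers.

```python
# Generic sketch of the per-image serialization visible above; all helper names
# are illustrative, not Nova's actual API.
import threading
from collections import defaultdict

_LOCKS = defaultdict(threading.Lock)  # one lock per cache path, like the named locks logged


def ensure_cached_image(image_id, search_datastore, mkdir):
    cache_dir = "[datastore1] devstack-image-cache_base"
    cache_path = f"{cache_dir}/{image_id}"
    with _LOCKS[cache_path]:                  # "Acquiring/Acquired lock ..." entries
        if not search_datastore(cache_path):  # SearchDatastore_Task
            mkdir(cache_dir)                  # FileManager.MakeDirectory
        return cache_path                     # lock released on exit, as logged
```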
[ 1595.404939] env[63241]: DEBUG nova.compute.manager [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1595.406355] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0973674-d175-453b-a4b2-d2f85c0ad3d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.499849] env[63241]: DEBUG nova.network.neutron [req-6dfa19f6-aa3b-4aab-aec3-975130e2ab85 req-50f7455e-3614-4cc4-a7f1-1503c7f3987c service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Updated VIF entry in instance network info cache for port b4bd3b82-c81b-4cde-a17e-1b6f2f52add3. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1595.500224] env[63241]: DEBUG nova.network.neutron [req-6dfa19f6-aa3b-4aab-aec3-975130e2ab85 req-50f7455e-3614-4cc4-a7f1-1503c7f3987c service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Updating instance_info_cache with network_info: [{"id": "b4bd3b82-c81b-4cde-a17e-1b6f2f52add3", "address": "fa:16:3e:75:03:17", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4bd3b82-c8", "ovs_interfaceid": "b4bd3b82-c81b-4cde-a17e-1b6f2f52add3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.580418] env[63241]: DEBUG nova.scheduler.client.report [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1595.580694] env[63241]: DEBUG nova.compute.provider_tree [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 
tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 90 to 91 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1595.580877] env[63241]: DEBUG nova.compute.provider_tree [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1595.651246] env[63241]: DEBUG nova.network.neutron [-] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.690986] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820472, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.852661] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529f89e9-da4c-9748-50b5-d6b651b0bb10, 'name': SearchDatastore_Task, 'duration_secs': 0.00978} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.853502] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb600ee6-4474-42d5-a676-df372c6941b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.859052] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1595.859052] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5233e735-8cef-e0bd-ed87-c187707b2097" [ 1595.859052] env[63241]: _type = "Task" [ 1595.859052] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.867126] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5233e735-8cef-e0bd-ed87-c187707b2097, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.899725] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0822be4-a966-4a56-bcb2-46f92ff1dd6a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.909161] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231ecfcb-cdfd-4f09-a192-93e574810b9d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.925774] env[63241]: INFO nova.compute.manager [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Took 64.39 seconds to build instance. [ 1595.944256] env[63241]: DEBUG nova.compute.manager [req-3b8185a0-4b10-4b75-8789-efd2db4736a5 req-684dccc8-b8ef-4b45-93ba-8451d83b229d service nova] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Detach interface failed, port_id=ab769350-6899-4d15-94f5-ede018f0f344, reason: Instance ac35fa03-aeca-4e18-84ab-cb80bb4cabfd could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1596.002464] env[63241]: DEBUG oslo_concurrency.lockutils [req-6dfa19f6-aa3b-4aab-aec3-975130e2ab85 req-50f7455e-3614-4cc4-a7f1-1503c7f3987c service nova] Releasing lock "refresh_cache-9e6ca606-383d-42f0-aea4-edecde33c1a4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.087605] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.065s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.091102] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 50.251s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.093167] env[63241]: INFO nova.compute.claims [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1596.111987] env[63241]: INFO nova.scheduler.client.report [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Deleted allocations for instance c3c278a8-0513-4a7f-881e-b71c70206860 [ 1596.153405] env[63241]: INFO nova.compute.manager [-] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Took 1.35 seconds to deallocate network for instance. 
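Note: the inventory dicts reported to Placement above (provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, generation 90 to 91) carry total, reserved, and allocation_ratio per resource class; Placement treats schedulable capacity as (total - reserved) * allocation_ratio. A small worked example using the logged values:

```python
# Worked example with the inventory data logged above; capacity follows
# Placement's (total - reserved) * allocation_ratio semantics.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```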
[ 1596.191330] env[63241]: DEBUG oslo_vmware.api [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820472, 'name': PowerOnVM_Task, 'duration_secs': 0.823393} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.192140] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1596.192342] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c43128f9-e20a-4249-8e81-9d294ea77584 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance '965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1596.308516] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquiring lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.369558] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5233e735-8cef-e0bd-ed87-c187707b2097, 'name': SearchDatastore_Task, 'duration_secs': 0.009686} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.369850] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.370121] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 9e6ca606-383d-42f0-aea4-edecde33c1a4/9e6ca606-383d-42f0-aea4-edecde33c1a4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1596.370382] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68b957ab-64e0-4298-9047-f70cd2ab6676 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.377127] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1596.377127] env[63241]: value = "task-1820473" [ 1596.377127] env[63241]: _type = "Task" [ 1596.377127] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.384688] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820473, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.429344] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dceb6f94-bfaf-4411-b788-255d9ed64a4e tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.905s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.430815] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.122s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.430958] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquiring lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.431183] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.431456] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.433188] env[63241]: INFO nova.compute.manager [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Terminating instance [ 1596.435330] env[63241]: DEBUG nova.compute.manager [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1596.435330] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1596.436136] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8760bb33-d5b9-4d61-a6d7-80adcebde902 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.448400] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1596.448400] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3acda15-5c7c-4778-8415-609d17015016 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.455505] env[63241]: DEBUG oslo_vmware.api [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1596.455505] env[63241]: value = "task-1820474" [ 1596.455505] env[63241]: _type = "Task" [ 1596.455505] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.463798] env[63241]: DEBUG oslo_vmware.api [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820474, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.620910] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0ecef0e8-a808-4b02-9b9c-861dad372381 tempest-ServersNegativeTestMultiTenantJSON-884736452 tempest-ServersNegativeTestMultiTenantJSON-884736452-project-member] Lock "c3c278a8-0513-4a7f-881e-b71c70206860" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.002s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.659692] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.887588] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820473, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500814} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.887964] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 9e6ca606-383d-42f0-aea4-edecde33c1a4/9e6ca606-383d-42f0-aea4-edecde33c1a4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1596.888749] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1596.888749] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ca2945d-d554-4ee3-a4b2-d66779be02d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.895057] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1596.895057] env[63241]: value = "task-1820475" [ 1596.895057] env[63241]: _type = "Task" [ 1596.895057] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.903150] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820475, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.934014] env[63241]: DEBUG nova.compute.manager [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1596.966371] env[63241]: DEBUG oslo_vmware.api [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820474, 'name': PowerOffVM_Task, 'duration_secs': 0.305397} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.966730] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1596.966944] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1596.967263] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78e8b5d2-866b-4632-8364-ee6cdcd27622 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.063106] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1597.063354] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1597.063544] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Deleting the datastore file [datastore1] 9b61cee5-65b4-499e-80fd-c6ce6f79dd13 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1597.063809] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70663617-4a52-4689-bb08-f6c03e274fe9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.070975] env[63241]: DEBUG oslo_vmware.api [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for the task: (returnval){ [ 1597.070975] env[63241]: value = "task-1820477" [ 1597.070975] env[63241]: _type = "Task" [ 1597.070975] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.078882] env[63241]: DEBUG oslo_vmware.api [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820477, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.407566] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820475, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069629} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.407806] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1597.408956] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c3dab3-fd45-4856-8206-301b83ffc835 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.223228] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 9e6ca606-383d-42f0-aea4-edecde33c1a4/9e6ca606-383d-42f0-aea4-edecde33c1a4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1598.236080] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39484a56-3edb-4026-ba86-fa3e2e7aea6a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.264331] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1598.264331] env[63241]: value = "task-1820478" [ 1598.264331] env[63241]: _type = "Task" [ 1598.264331] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.264638] env[63241]: DEBUG oslo_vmware.api [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Task: {'id': task-1820477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152482} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.265832] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1598.265832] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1598.265832] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1598.265832] env[63241]: INFO nova.compute.manager [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Took 1.83 seconds to destroy the instance on the hypervisor. [ 1598.266313] env[63241]: DEBUG oslo.service.loopingcall [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1598.269361] env[63241]: DEBUG nova.compute.manager [-] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1598.269460] env[63241]: DEBUG nova.network.neutron [-] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1598.273324] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.277854] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820478, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.280919] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01d2521-4840-4f86-b1d1-fae75aba7b0f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.287911] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d552e71e-99db-40ec-8024-976952b61005 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.322883] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152275e8-d741-4b78-a2b5-222b63361277 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.331473] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d270a73-8a19-48be-9477-eecb354f8159 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.347050] env[63241]: DEBUG nova.compute.provider_tree [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1598.754221] env[63241]: DEBUG oslo_concurrency.lockutils [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.754221] env[63241]: DEBUG oslo_concurrency.lockutils [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.754221] env[63241]: DEBUG nova.compute.manager [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Going to confirm migration 1 {{(pid=63241) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1598.775925] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820478, 'name': ReconfigVM_Task, 'duration_secs': 0.314591} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.776445] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 9e6ca606-383d-42f0-aea4-edecde33c1a4/9e6ca606-383d-42f0-aea4-edecde33c1a4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1598.777514] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3fc06c75-7987-46c9-a4c9-afc68fe9da7e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.783687] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1598.783687] env[63241]: value = "task-1820479" [ 1598.783687] env[63241]: _type = "Task" [ 1598.783687] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.792133] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820479, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.856028] env[63241]: DEBUG nova.scheduler.client.report [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1598.989516] env[63241]: DEBUG nova.compute.manager [req-c75bae0e-b756-4be7-9cb2-409d68191826 req-fa43e3b5-13d5-4d6d-8b02-69bbd44e32e1 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Received event network-vif-deleted-2dcf61ef-f6c5-4404-93d1-eb41df5f55f5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1598.989829] env[63241]: INFO nova.compute.manager [req-c75bae0e-b756-4be7-9cb2-409d68191826 req-fa43e3b5-13d5-4d6d-8b02-69bbd44e32e1 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Neutron deleted interface 2dcf61ef-f6c5-4404-93d1-eb41df5f55f5; detaching it from the instance and deleting it from the info cache [ 1598.990414] env[63241]: DEBUG nova.network.neutron [req-c75bae0e-b756-4be7-9cb2-409d68191826 req-fa43e3b5-13d5-4d6d-8b02-69bbd44e32e1 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.294090] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 
tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820479, 'name': Rename_Task, 'duration_secs': 0.153737} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.294388] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1599.294637] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37668560-9f35-4457-bdea-dcc7bcabbcbb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.301037] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1599.301037] env[63241]: value = "task-1820480" [ 1599.301037] env[63241]: _type = "Task" [ 1599.301037] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.308295] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820480, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.333904] env[63241]: DEBUG oslo_concurrency.lockutils [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.334200] env[63241]: DEBUG oslo_concurrency.lockutils [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.334398] env[63241]: DEBUG nova.network.neutron [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1599.334590] env[63241]: DEBUG nova.objects.instance [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lazy-loading 'info_cache' on Instance uuid 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1599.361282] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.271s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.361895] env[63241]: DEBUG nova.compute.manager [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1599.365047] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 43.283s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.404979] env[63241]: DEBUG nova.network.neutron [-] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.492855] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f4afd12-65e0-4962-b017-b7b313bb17b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.502942] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3a85fb-dfe8-4f5e-a9e5-a45513e66767 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.533336] env[63241]: DEBUG nova.compute.manager [req-c75bae0e-b756-4be7-9cb2-409d68191826 req-fa43e3b5-13d5-4d6d-8b02-69bbd44e32e1 service nova] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Detach interface failed, port_id=2dcf61ef-f6c5-4404-93d1-eb41df5f55f5, reason: Instance 9b61cee5-65b4-499e-80fd-c6ce6f79dd13 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1599.812105] env[63241]: DEBUG oslo_vmware.api [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820480, 'name': PowerOnVM_Task, 'duration_secs': 0.441977} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.812622] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1599.812851] env[63241]: INFO nova.compute.manager [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Took 7.74 seconds to spawn the instance on the hypervisor. 
[ 1599.813231] env[63241]: DEBUG nova.compute.manager [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1599.815238] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260deeee-d65c-4530-94bf-7d77be431179 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.877978] env[63241]: DEBUG nova.compute.utils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1599.880941] env[63241]: DEBUG nova.compute.manager [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Not allocating networking since 'none' was specified. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1599.907859] env[63241]: INFO nova.compute.manager [-] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Took 1.64 seconds to deallocate network for instance. [ 1600.345685] env[63241]: INFO nova.compute.manager [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Took 62.82 seconds to build instance. [ 1600.377957] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Applying migration context for instance 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce as it has an incoming, in-progress migration dbead17e-aa42-4c50-ae83-6d0d9b03d450. Migration status is confirming {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1600.379718] env[63241]: INFO nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating resource usage from migration dbead17e-aa42-4c50-ae83-6d0d9b03d450 [ 1600.383182] env[63241]: DEBUG nova.compute.manager [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1600.410162] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance a1a8342a-b00e-42c1-8c01-a95659a78caf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.410485] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0e5447fd-a04f-4bc2-b329-e015883773b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.412729] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 94a604da-ad3d-415a-aa92-d648e3da803d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.412729] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance ac35fa03-aeca-4e18-84ab-cb80bb4cabfd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1600.412729] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 72a11582-1fad-428a-bde1-e9d0b05731cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.412729] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 9361ee6a-7c4d-4409-bc3c-7da7d4550d97 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.412729] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance c7b034f7-1d7f-4782-9ecb-5987c35339cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.412729] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance a534b054-2143-41c4-a0fa-028339ecdbbf is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1600.412729] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 7158c64a-5036-419b-b110-7e22c12bf3dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.412729] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e3842404-2c80-4fa9-b0c9-c58c484845a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.412729] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 943100f1-e702-4869-8c19-d81d39712ac5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1600.412729] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 34d138e6-90b3-4243-bf45-96ae856cd631 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1600.412729] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance f4eb5e49-bae0-435c-93f0-15d6939f9e7c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1600.412729] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Migration dbead17e-aa42-4c50-ae83-6d0d9b03d450 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1600.412729] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.412729] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 9b61cee5-65b4-499e-80fd-c6ce6f79dd13 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.413289] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 9e6ca606-383d-42f0-aea4-edecde33c1a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.413289] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance ef36a081-6273-4397-b48f-c2bd03d0a865 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1600.417623] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.654322] env[63241]: DEBUG nova.network.neutron [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance_info_cache with network_info: [{"id": "0457ca89-42e2-485c-a958-773620259283", "address": "fa:16:3e:e2:4e:b4", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0457ca89-42", "ovs_interfaceid": "0457ca89-42e2-485c-a958-773620259283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1600.848163] env[63241]: DEBUG oslo_concurrency.lockutils [None req-810bdda5-3a73-4ef9-864e-482c1a310ae2 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "9e6ca606-383d-42f0-aea4-edecde33c1a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.334s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.919616] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 31e84206-e583-4610-969e-2ccae2d0b206 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1601.045375] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "9e6ca606-383d-42f0-aea4-edecde33c1a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.045647] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "9e6ca606-383d-42f0-aea4-edecde33c1a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.045860] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "9e6ca606-383d-42f0-aea4-edecde33c1a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.046055] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "9e6ca606-383d-42f0-aea4-edecde33c1a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.046226] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "9e6ca606-383d-42f0-aea4-edecde33c1a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.048851] env[63241]: INFO nova.compute.manager [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Terminating instance [ 1601.050735] env[63241]: DEBUG nova.compute.manager [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1601.050934] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1601.051862] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c246b8-7e8e-4c1d-8fa1-3b7137e68178 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.060414] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1601.060608] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38e9b5dd-943c-4cd3-ae85-5ea0732e5d82 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.067385] env[63241]: DEBUG oslo_vmware.api [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1601.067385] env[63241]: value = "task-1820481" [ 1601.067385] env[63241]: _type = "Task" [ 1601.067385] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.076280] env[63241]: DEBUG oslo_vmware.api [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820481, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.157413] env[63241]: DEBUG oslo_concurrency.lockutils [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "refresh_cache-965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.157764] env[63241]: DEBUG nova.objects.instance [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lazy-loading 'migration_context' on Instance uuid 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1601.376228] env[63241]: DEBUG nova.compute.manager [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1601.394468] env[63241]: DEBUG nova.compute.manager [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1601.422062] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 9d301157-6870-4452-9ae6-0d45c4338886 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1601.427364] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1601.427801] env[63241]: DEBUG nova.virt.hardware [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1601.428481] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5d32db-6f1e-42e6-9bcc-b457729180dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.436745] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466a14aa-8aa0-490a-bec5-41181b9a54aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.450215] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1601.458088] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Creating folder: Project (ffeb067762db4b299d5b537c45a83853). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1601.458088] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5117dab9-8888-4bc6-829e-f5969ea1c8f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.467451] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Created folder: Project (ffeb067762db4b299d5b537c45a83853) in parent group-v376927. [ 1601.467674] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Creating folder: Instances. 
Parent ref: group-v377097. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1601.467912] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-114ebe6c-93c9-4f76-9411-605058c06cbb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.476588] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Created folder: Instances in parent group-v377097. [ 1601.476853] env[63241]: DEBUG oslo.service.loopingcall [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1601.477056] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1601.477397] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e2d51cf-94c7-4904-9c7b-cc1cd5301395 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.493921] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1601.493921] env[63241]: value = "task-1820484" [ 1601.493921] env[63241]: _type = "Task" [ 1601.493921] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.500884] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820484, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.577864] env[63241]: DEBUG oslo_vmware.api [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820481, 'name': PowerOffVM_Task, 'duration_secs': 0.186988} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.578148] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1601.578318] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1601.578566] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0d55ff7-f22b-461a-8618-39f5a8d0d76c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.661449] env[63241]: DEBUG nova.objects.base [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Object Instance<965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce> lazy-loaded attributes: info_cache,migration_context {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1601.662599] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f816eaaa-d260-4d7b-bcb2-e123eca59c5c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.681691] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf86eb75-a824-480c-9e6c-f0589ba489a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.687255] env[63241]: DEBUG oslo_vmware.api [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1601.687255] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52590d54-c841-fba1-947f-0d7127c6797e" [ 1601.687255] env[63241]: _type = "Task" [ 1601.687255] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.696560] env[63241]: DEBUG oslo_vmware.api [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52590d54-c841-fba1-947f-0d7127c6797e, 'name': SearchDatastore_Task, 'duration_secs': 0.005993} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.696560] env[63241]: DEBUG oslo_concurrency.lockutils [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.702669] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1601.703376] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1601.703376] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleting the datastore file [datastore1] 9e6ca606-383d-42f0-aea4-edecde33c1a4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1601.703376] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bebbb07d-4903-4701-94d0-9fad3a34975d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.711817] env[63241]: DEBUG oslo_vmware.api [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1601.711817] env[63241]: value = "task-1820486" [ 1601.711817] env[63241]: _type = "Task" [ 1601.711817] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.722819] env[63241]: DEBUG oslo_vmware.api [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820486, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.899009] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.924996] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance cfdc6b34-6940-414f-b17d-6fe17f92474a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1602.006468] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820484, 'name': CreateVM_Task, 'duration_secs': 0.252039} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.006568] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1602.006971] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.007147] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.007459] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1602.007704] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93cc67a0-6944-4ee4-8ef7-81bf1de5953e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.012156] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1602.012156] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52278cda-4d6d-338c-05b5-f7b14f5da65f" [ 1602.012156] env[63241]: _type = "Task" [ 1602.012156] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.019540] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52278cda-4d6d-338c-05b5-f7b14f5da65f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.222285] env[63241]: DEBUG oslo_vmware.api [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143636} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.222580] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1602.222837] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1602.223120] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1602.223321] env[63241]: INFO nova.compute.manager [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1602.223594] env[63241]: DEBUG oslo.service.loopingcall [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1602.223823] env[63241]: DEBUG nova.compute.manager [-] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1602.223944] env[63241]: DEBUG nova.network.neutron [-] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1602.428041] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance f583adda-976e-4f79-adc7-0b4e1a73ad73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1602.526006] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52278cda-4d6d-338c-05b5-f7b14f5da65f, 'name': SearchDatastore_Task, 'duration_secs': 0.009308} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.526334] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.526665] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1602.526915] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.527074] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.531187] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1602.531732] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e99801a-f5f0-4f19-b524-5f8897f05515 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.540385] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1602.540563] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1602.541279] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d700f140-2a3b-4ebe-b130-ee1582d956ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.546346] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1602.546346] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c6b4bc-b5e0-3b47-82ca-e3319fdfbeff" [ 1602.546346] env[63241]: _type = "Task" [ 1602.546346] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.553947] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c6b4bc-b5e0-3b47-82ca-e3319fdfbeff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.570276] env[63241]: DEBUG nova.compute.manager [req-07d1b403-bc8a-477f-9f72-9696bce41f04 req-6629738a-2760-4c3b-9a47-535cc7cbd4e8 service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Received event network-vif-deleted-b4bd3b82-c81b-4cde-a17e-1b6f2f52add3 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1602.570476] env[63241]: INFO nova.compute.manager [req-07d1b403-bc8a-477f-9f72-9696bce41f04 req-6629738a-2760-4c3b-9a47-535cc7cbd4e8 service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Neutron deleted interface b4bd3b82-c81b-4cde-a17e-1b6f2f52add3; detaching it from the instance and deleting it from the info cache [ 1602.570646] env[63241]: DEBUG nova.network.neutron [req-07d1b403-bc8a-477f-9f72-9696bce41f04 req-6629738a-2760-4c3b-9a47-535cc7cbd4e8 service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.931831] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 1626092d-78ef-41b5-8b47-fb840d63e4f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1603.005897] env[63241]: DEBUG nova.network.neutron [-] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.057669] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c6b4bc-b5e0-3b47-82ca-e3319fdfbeff, 'name': SearchDatastore_Task, 'duration_secs': 0.008138} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.058586] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b50cb83-8826-4283-b8d7-f7ecdd2b2054 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.064388] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1603.064388] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528786ad-07fd-bcd0-a470-a1fdbdf28fe1" [ 1603.064388] env[63241]: _type = "Task" [ 1603.064388] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.072733] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528786ad-07fd-bcd0-a470-a1fdbdf28fe1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.072971] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f7399ef-514c-409b-b2e4-91bb3c0895e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.081169] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f6f242-a916-47f8-8cc1-1c743e57c76c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.111037] env[63241]: DEBUG nova.compute.manager [req-07d1b403-bc8a-477f-9f72-9696bce41f04 req-6629738a-2760-4c3b-9a47-535cc7cbd4e8 service nova] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Detach interface failed, port_id=b4bd3b82-c81b-4cde-a17e-1b6f2f52add3, reason: Instance 9e6ca606-383d-42f0-aea4-edecde33c1a4 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1603.435434] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 20c7a1a1-4396-414f-a52c-06551722b6eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1603.509047] env[63241]: INFO nova.compute.manager [-] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Took 1.28 seconds to deallocate network for instance. [ 1603.575264] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528786ad-07fd-bcd0-a470-a1fdbdf28fe1, 'name': SearchDatastore_Task, 'duration_secs': 0.009957} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.575528] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.575778] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] ef36a081-6273-4397-b48f-c2bd03d0a865/ef36a081-6273-4397-b48f-c2bd03d0a865.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1603.576044] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8e53c9b-f63c-4323-8de7-3c3b8ae2f8d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.582468] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1603.582468] env[63241]: value = "task-1820487" [ 1603.582468] env[63241]: _type = "Task" [ 1603.582468] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.590298] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820487, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.938845] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance fb5d60fa-fa13-44a1-8291-4645761a0c80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1604.015038] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.093273] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820487, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44795} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.093536] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] ef36a081-6273-4397-b48f-c2bd03d0a865/ef36a081-6273-4397-b48f-c2bd03d0a865.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1604.093744] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1604.093987] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4820be3f-f64b-46ba-8e0c-2c2c4a02097b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.100271] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1604.100271] env[63241]: value = "task-1820488" [ 1604.100271] env[63241]: _type = "Task" [ 1604.100271] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.107243] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820488, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.442426] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 14af9f82-525e-453c-8dc5-ef5b13c67ee4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1604.611166] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820488, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064651} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.611166] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1604.611435] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288a7d71-897f-434a-9da9-6dd08ea73cec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.630662] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] ef36a081-6273-4397-b48f-c2bd03d0a865/ef36a081-6273-4397-b48f-c2bd03d0a865.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1604.631252] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d066af22-46e5-481a-be94-e316b097edc1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.651374] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1604.651374] env[63241]: value = "task-1820489" [ 1604.651374] env[63241]: _type = "Task" [ 1604.651374] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.660721] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820489, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.945457] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance cb7eb689-b8f6-479d-aa6b-c27fab16e131 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1604.945756] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1604.945994] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1605.163026] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820489, 'name': ReconfigVM_Task, 'duration_secs': 0.265764} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.163157] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Reconfigured VM instance instance-0000003c to attach disk [datastore1] ef36a081-6273-4397-b48f-c2bd03d0a865/ef36a081-6273-4397-b48f-c2bd03d0a865.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1605.163751] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-248e93a0-b0d8-4160-9723-ddfd7ccde0cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.173151] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1605.173151] env[63241]: value = "task-1820490" [ 1605.173151] env[63241]: _type = "Task" [ 1605.173151] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.182211] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820490, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.248530] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9e956c-f322-4723-82b3-c85af1d869db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.255690] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86850cdf-b124-4b59-9060-e7fc5c4c2ba5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.285917] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a28a13-719f-483e-bb50-f2597913004b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.293281] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a831c68-6bc5-4965-8b4c-297e1534fc70 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.308372] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1605.683116] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820490, 'name': Rename_Task, 'duration_secs': 0.145457} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.683441] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1605.683681] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86458054-6992-4f36-a807-d0957a554ded {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.690015] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1605.690015] env[63241]: value = "task-1820491" [ 1605.690015] env[63241]: _type = "Task" [ 1605.690015] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.697170] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820491, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.841457] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 91 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1605.841701] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 91 to 92 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1605.841866] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1606.201450] env[63241]: DEBUG oslo_vmware.api [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820491, 'name': PowerOnVM_Task, 'duration_secs': 0.393153} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.201727] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1606.201928] env[63241]: INFO nova.compute.manager [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Took 4.81 seconds to spawn the instance on the hypervisor. 
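The records above trace a complete spawn: the cached image VMDK is located with SearchDatastore_Task, copied with CopyVirtualDisk_Task, extended to the flavor's root-disk size, attached via ReconfigVM_Task, renamed, and finally powered on, with every vCenter task polled through oslo.vmware's wait_for_task loop. The sketch below is an editor's illustration of the copy-and-extend step only, not code taken from Nova; the function name and arguments are placeholders, and `session` stands for an already established oslo_vmware.api.VMwareAPISession.

def copy_and_extend_root_disk(session, dc_ref, cached_vmdk, instance_vmdk,
                              new_capacity_kb):
    """Copy a cached image VMDK to the instance directory, then extend it.

    Mirrors the CopyVirtualDisk_Task / ExtendVirtualDisk_Task sequence logged
    above; each *_Task call returns a task reference that wait_for_task()
    polls until it succeeds (the "progress is N%" lines in the log).
    """
    disk_mgr = session.vim.service_content.virtualDiskManager

    # Copy e.g. [datastore1] devstack-image-cache_base/<image>.vmdk to
    # [datastore1] <instance-uuid>/<instance-uuid>.vmdk.
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=cached_vmdk, sourceDatacenter=dc_ref,
        destName=instance_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # Grow the copied root disk to the flavor size
    # (1048576 KB = 1 GiB in the records above).
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=instance_vmdk, datacenter=dc_ref,
        newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(extend_task)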
[ 1606.202122] env[63241]: DEBUG nova.compute.manager [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1606.202853] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d28ca9c-1534-443a-9af5-c51649f6e13c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.346423] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1606.346813] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.982s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.347034] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.660s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.348598] env[63241]: INFO nova.compute.claims [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1606.719287] env[63241]: INFO nova.compute.manager [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Took 60.91 seconds to build instance. 
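The 'Acquiring lock "compute_resources" ... waited 46.660s / held 6.982s' records come from oslo.concurrency's instrumentation around the resource tracker's critical sections. Below is a minimal sketch of that locking pattern, assuming the standard lockutils decorator and context-manager API; the function names and bodies are placeholders, not an excerpt from Nova's resource tracker.

from oslo_concurrency import lockutils

# Name of the semaphore the records above show being contended.
COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'


# Decorator form: the lockutils wrapper logs 'Acquiring lock ... by <target>',
# then 'acquired ... waited Ns' and '"released" ... held Ns' around the call,
# exactly as seen in the records above.
@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def instance_claim(context, instance, nodename):
    # Claim resources for a newly scheduled instance while no other
    # resource-tracker operation can run on this host.
    pass


# Equivalent context-manager form.
def update_usage(context, instance, nodename):
    with lockutils.lock(COMPUTE_RESOURCE_SEMAPHORE):
        pass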
[ 1607.221050] env[63241]: DEBUG oslo_concurrency.lockutils [None req-cc26d71b-3e23-4b92-a311-7f528d204da4 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "ef36a081-6273-4397-b48f-c2bd03d0a865" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.422s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.604196] env[63241]: DEBUG nova.compute.manager [None req-905d32b5-eb37-4a0a-a2ef-c5b3bbfd7c50 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1607.605200] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e54bd23-57d1-4143-b27f-80885642cd03 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.667647] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367dac59-fc23-4964-b74a-d5ddf3248319 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.675108] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquiring lock "ef36a081-6273-4397-b48f-c2bd03d0a865" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.675352] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "ef36a081-6273-4397-b48f-c2bd03d0a865" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.675553] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquiring lock "ef36a081-6273-4397-b48f-c2bd03d0a865-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.675733] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "ef36a081-6273-4397-b48f-c2bd03d0a865-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.675902] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "ef36a081-6273-4397-b48f-c2bd03d0a865-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.678163] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9b679c-bbf7-45a9-aa2c-9d550cc92f2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.681444] env[63241]: INFO nova.compute.manager [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Terminating instance [ 1607.683299] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquiring lock "refresh_cache-ef36a081-6273-4397-b48f-c2bd03d0a865" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.683491] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquired lock "refresh_cache-ef36a081-6273-4397-b48f-c2bd03d0a865" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.683667] env[63241]: DEBUG nova.network.neutron [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1607.710793] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dd130f-1506-4dc6-a4ea-9527361f64eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.718475] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7856f063-ade7-4f61-ac44-5c418552cef2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.731938] env[63241]: DEBUG nova.compute.provider_tree [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1608.118235] env[63241]: INFO nova.compute.manager [None req-905d32b5-eb37-4a0a-a2ef-c5b3bbfd7c50 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] instance snapshotting [ 1608.119125] env[63241]: DEBUG nova.objects.instance [None req-905d32b5-eb37-4a0a-a2ef-c5b3bbfd7c50 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lazy-loading 'flavor' on Instance uuid ef36a081-6273-4397-b48f-c2bd03d0a865 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1608.205922] env[63241]: DEBUG nova.network.neutron [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: 
ef36a081-6273-4397-b48f-c2bd03d0a865] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1608.234488] env[63241]: DEBUG nova.scheduler.client.report [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1608.263632] env[63241]: DEBUG nova.network.neutron [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.626227] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6dada0-9a54-47f2-b903-7eea6d55815a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.646168] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5fa6ef-9429-4621-8e49-8871d3ae4461 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.739584] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.740449] env[63241]: DEBUG nova.compute.manager [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1608.743374] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.716s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.744999] env[63241]: INFO nova.compute.claims [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1608.765925] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Releasing lock "refresh_cache-ef36a081-6273-4397-b48f-c2bd03d0a865" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.766382] env[63241]: DEBUG nova.compute.manager [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1608.766593] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1608.767521] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a630b4-55c9-49ff-bbe8-d339443f5378 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.777754] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1608.777986] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a15fd2d-6611-4f78-97dc-8bb92ea3aef1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.785773] env[63241]: DEBUG oslo_vmware.api [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1608.785773] env[63241]: value = "task-1820492" [ 1608.785773] env[63241]: _type = "Task" [ 1608.785773] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.794630] env[63241]: DEBUG oslo_vmware.api [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820492, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.156210] env[63241]: DEBUG nova.compute.manager [None req-905d32b5-eb37-4a0a-a2ef-c5b3bbfd7c50 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Instance disappeared during snapshot {{(pid=63241) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1609.255151] env[63241]: DEBUG nova.compute.utils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1609.255151] env[63241]: DEBUG nova.compute.manager [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1609.255151] env[63241]: DEBUG nova.network.neutron [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1609.301203] env[63241]: DEBUG oslo_vmware.api [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820492, 'name': PowerOffVM_Task, 'duration_secs': 0.120203} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.301203] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1609.301203] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1609.301203] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c31f08f-49f0-47a7-a64f-7b6269abfcfb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.317902] env[63241]: DEBUG nova.policy [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de6df2e8caaa4c0c82c94f9d107a8e17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6964b0dd75c4704b8f5cacd2c8e355f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1609.321476] env[63241]: DEBUG nova.compute.manager [None req-905d32b5-eb37-4a0a-a2ef-c5b3bbfd7c50 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Found 0 images (rotation: 2) {{(pid=63241) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1609.325738] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1609.325940] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1609.326166] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Deleting the datastore file [datastore1] ef36a081-6273-4397-b48f-c2bd03d0a865 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1609.326419] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bcf69ae-352b-42cd-b205-3dcf98e728dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1609.332515] env[63241]: DEBUG oslo_vmware.api [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for the task: (returnval){ [ 1609.332515] env[63241]: value = "task-1820494" [ 1609.332515] env[63241]: _type = "Task" [ 1609.332515] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.340826] env[63241]: DEBUG oslo_vmware.api [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820494, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.607123] env[63241]: DEBUG nova.network.neutron [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Successfully created port: 6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1609.756730] env[63241]: DEBUG nova.compute.manager [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1609.846597] env[63241]: DEBUG oslo_vmware.api [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Task: {'id': task-1820494, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08566} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.846956] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1609.847245] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1609.847486] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1609.847772] env[63241]: INFO nova.compute.manager [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Took 1.08 seconds to destroy the instance on the hypervisor. 
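The termination records above follow the same task-driven pattern as the spawn: PowerOffVM_Task, UnregisterVM, then FileManager.DeleteDatastoreFile_Task to remove the instance directory, followed by network deallocation. The sketch below illustrates those three vCenter calls under the same assumptions as the earlier one (placeholder function, `session` is an established oslo.vmware session); it is not Nova's actual vmops code.

def destroy_vm(session, vm_ref, dc_ref, instance_dir_path):
    """Power off and unregister a VM, then delete its datastore directory.

    instance_dir_path is a datastore path such as
    "[datastore1] ef36a081-6273-4397-b48f-c2bd03d0a865".
    """
    # Power off first; a running VM cannot be unregistered cleanly.
    power_off_task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(power_off_task)

    # UnregisterVM removes the VM from vCenter inventory but leaves its files
    # on the datastore (the "Unregistered the VM" records above).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Delete the instance directory and its contents from the datastore.
    file_mgr = session.vim.service_content.fileManager
    delete_task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name=instance_dir_path, datacenter=dc_ref)
    session.wait_for_task(delete_task)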
[ 1609.848080] env[63241]: DEBUG oslo.service.loopingcall [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1609.848339] env[63241]: DEBUG nova.compute.manager [-] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1609.848464] env[63241]: DEBUG nova.network.neutron [-] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1609.865368] env[63241]: DEBUG nova.network.neutron [-] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1610.104775] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcb9dc1-056c-43f8-9061-10d4a622fcd1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.112484] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a8074a-82ab-4e6d-be52-176eb3d08e47 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.142954] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bcda50-ad9c-4768-9e42-47356fe95549 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.150271] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9836b1ed-2131-4a2a-b002-a2673d7ed47a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.163508] env[63241]: DEBUG nova.compute.provider_tree [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1610.372042] env[63241]: DEBUG nova.network.neutron [-] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.693808] env[63241]: DEBUG nova.scheduler.client.report [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 92 in Placement from set_inventory_for_provider using data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1610.694089] env[63241]: DEBUG nova.compute.provider_tree [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 92 to 93 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1610.694304] env[63241]: DEBUG nova.compute.provider_tree [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1610.768515] env[63241]: DEBUG nova.compute.manager [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1610.793864] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1610.794145] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1610.794311] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1610.794493] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1610.794642] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1610.794789] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1610.794994] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1610.795279] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1610.795496] env[63241]: DEBUG 
nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1610.795670] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1610.795871] env[63241]: DEBUG nova.virt.hardware [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1610.796722] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2474937b-2c3a-4968-8f99-9656d8bed0bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.805264] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6e3837-2c45-4092-8ede-098d0a720437 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.874023] env[63241]: INFO nova.compute.manager [-] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Took 1.03 seconds to deallocate network for instance. [ 1611.196755] env[63241]: DEBUG nova.compute.manager [req-1782be1d-5b2e-4496-81ff-ab0a3ddafeed req-288e6549-1d87-45e2-9579-85e809f403af service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Received event network-vif-plugged-6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1611.196981] env[63241]: DEBUG oslo_concurrency.lockutils [req-1782be1d-5b2e-4496-81ff-ab0a3ddafeed req-288e6549-1d87-45e2-9579-85e809f403af service nova] Acquiring lock "31e84206-e583-4610-969e-2ccae2d0b206-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.197252] env[63241]: DEBUG oslo_concurrency.lockutils [req-1782be1d-5b2e-4496-81ff-ab0a3ddafeed req-288e6549-1d87-45e2-9579-85e809f403af service nova] Lock "31e84206-e583-4610-969e-2ccae2d0b206-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.197427] env[63241]: DEBUG oslo_concurrency.lockutils [req-1782be1d-5b2e-4496-81ff-ab0a3ddafeed req-288e6549-1d87-45e2-9579-85e809f403af service nova] Lock "31e84206-e583-4610-969e-2ccae2d0b206-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.197597] env[63241]: DEBUG nova.compute.manager [req-1782be1d-5b2e-4496-81ff-ab0a3ddafeed req-288e6549-1d87-45e2-9579-85e809f403af service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] No waiting events found dispatching 
network-vif-plugged-6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1611.197799] env[63241]: WARNING nova.compute.manager [req-1782be1d-5b2e-4496-81ff-ab0a3ddafeed req-288e6549-1d87-45e2-9579-85e809f403af service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Received unexpected event network-vif-plugged-6bc11935-f0d5-456c-b815-ea415689a621 for instance with vm_state building and task_state spawning. [ 1611.198865] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.199544] env[63241]: DEBUG nova.compute.manager [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1611.202443] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.492s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.203787] env[63241]: INFO nova.compute.claims [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1611.299584] env[63241]: DEBUG nova.network.neutron [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Successfully updated port: 6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1611.380910] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.710875] env[63241]: DEBUG nova.compute.utils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1611.712293] env[63241]: DEBUG nova.compute.manager [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1611.712460] env[63241]: DEBUG nova.network.neutron [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1611.751450] env[63241]: DEBUG nova.policy [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54dc853b6f204a75ae7612f9fbd2d1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecafb0abbdc74501b22b20b797c4c60c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1611.805388] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.805602] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.805809] env[63241]: DEBUG nova.network.neutron [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1612.051772] env[63241]: DEBUG nova.network.neutron [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Successfully created port: 10657b5b-6750-4389-b802-7e6bee8963e7 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1612.217654] env[63241]: DEBUG nova.compute.manager [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1612.339868] env[63241]: DEBUG nova.network.neutron [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1612.470332] env[63241]: DEBUG nova.network.neutron [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updating instance_info_cache with network_info: [{"id": "6bc11935-f0d5-456c-b815-ea415689a621", "address": "fa:16:3e:de:e7:97", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc11935-f0", "ovs_interfaceid": "6bc11935-f0d5-456c-b815-ea415689a621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.568098] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4853a078-c7f3-45ef-aff7-3597564f3d05 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.576125] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba0c6b5-03fc-4e7d-84e7-9956b3dd04b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.605670] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563491f2-d839-407e-be53-f40d65257999 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.612785] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4109dfb-f9e8-4658-a13b-84472136685f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.626871] env[63241]: DEBUG nova.compute.provider_tree [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1612.972707] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.973113] env[63241]: DEBUG 
nova.compute.manager [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Instance network_info: |[{"id": "6bc11935-f0d5-456c-b815-ea415689a621", "address": "fa:16:3e:de:e7:97", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc11935-f0", "ovs_interfaceid": "6bc11935-f0d5-456c-b815-ea415689a621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1612.973578] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:e7:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6bc11935-f0d5-456c-b815-ea415689a621', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1612.981478] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Creating folder: Project (f6964b0dd75c4704b8f5cacd2c8e355f). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1612.981763] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aba07c0c-1b39-43b8-9b0d-0a5d276a1e44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.993227] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Created folder: Project (f6964b0dd75c4704b8f5cacd2c8e355f) in parent group-v376927. [ 1612.993410] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Creating folder: Instances. Parent ref: group-v377100. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1612.993622] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fad6a0f3-9206-4fa9-a67a-d33805d1fff4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.002338] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Created folder: Instances in parent group-v377100. [ 1613.002537] env[63241]: DEBUG oslo.service.loopingcall [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1613.002713] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1613.002905] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b1915f4-a5cb-48c0-870c-ebf9e3bbf7ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.021026] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1613.021026] env[63241]: value = "task-1820497" [ 1613.021026] env[63241]: _type = "Task" [ 1613.021026] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.028514] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820497, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.130909] env[63241]: DEBUG nova.scheduler.client.report [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1613.224599] env[63241]: DEBUG nova.compute.manager [req-a3891e3e-b265-4fcc-beaa-31928f4763b3 req-b4c61bf7-cc2d-4217-ba50-62b7396ebc4f service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Received event network-changed-6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1613.224760] env[63241]: DEBUG nova.compute.manager [req-a3891e3e-b265-4fcc-beaa-31928f4763b3 req-b4c61bf7-cc2d-4217-ba50-62b7396ebc4f service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Refreshing instance network info cache due to event network-changed-6bc11935-f0d5-456c-b815-ea415689a621. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1613.224986] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3891e3e-b265-4fcc-beaa-31928f4763b3 req-b4c61bf7-cc2d-4217-ba50-62b7396ebc4f service nova] Acquiring lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.225147] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3891e3e-b265-4fcc-beaa-31928f4763b3 req-b4c61bf7-cc2d-4217-ba50-62b7396ebc4f service nova] Acquired lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.225311] env[63241]: DEBUG nova.network.neutron [req-a3891e3e-b265-4fcc-beaa-31928f4763b3 req-b4c61bf7-cc2d-4217-ba50-62b7396ebc4f service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Refreshing network info cache for port 6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1613.227559] env[63241]: DEBUG nova.compute.manager [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1613.255533] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1613.255814] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1613.256029] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1613.256240] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1613.256393] env[63241]: DEBUG 
nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1613.256540] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1613.256762] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1613.256918] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1613.257099] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1613.257269] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1613.257470] env[63241]: DEBUG nova.virt.hardware [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1613.258381] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7864c084-5736-4ed1-adff-1cb8e4a6fa2b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.266997] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5575e486-7a43-433f-9f1e-666ff5918d57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.531580] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820497, 'name': CreateVM_Task, 'duration_secs': 0.323239} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.531775] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1613.532689] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.532925] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.533257] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1613.533528] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53a448ff-7181-42ba-ac31-3e02148ba359 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.538357] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1613.538357] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522abf0e-e316-0f74-8ce9-b628a174728f" [ 1613.538357] env[63241]: _type = "Task" [ 1613.538357] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.546919] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522abf0e-e316-0f74-8ce9-b628a174728f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.635660] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.636212] env[63241]: DEBUG nova.compute.manager [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1613.639602] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.896s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.641294] env[63241]: INFO nova.compute.claims [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1613.787434] env[63241]: DEBUG nova.network.neutron [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Successfully updated port: 10657b5b-6750-4389-b802-7e6bee8963e7 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1613.936868] env[63241]: DEBUG nova.network.neutron [req-a3891e3e-b265-4fcc-beaa-31928f4763b3 req-b4c61bf7-cc2d-4217-ba50-62b7396ebc4f service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updated VIF entry in instance network info cache for port 6bc11935-f0d5-456c-b815-ea415689a621. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1613.937043] env[63241]: DEBUG nova.network.neutron [req-a3891e3e-b265-4fcc-beaa-31928f4763b3 req-b4c61bf7-cc2d-4217-ba50-62b7396ebc4f service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updating instance_info_cache with network_info: [{"id": "6bc11935-f0d5-456c-b815-ea415689a621", "address": "fa:16:3e:de:e7:97", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc11935-f0", "ovs_interfaceid": "6bc11935-f0d5-456c-b815-ea415689a621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.048373] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522abf0e-e316-0f74-8ce9-b628a174728f, 'name': SearchDatastore_Task, 'duration_secs': 0.011176} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.048695] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.048928] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1614.049190] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.049336] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.049514] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1614.049763] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-917aa814-3a0d-40b2-b0b5-dcc6c3b1d64e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.062173] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1614.062357] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1614.063111] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3ad717a-94d2-4d76-a947-6f84587edc7c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.068012] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1614.068012] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52984cce-8da2-def1-5eb6-3d95613d8fa6" [ 1614.068012] env[63241]: _type = "Task" [ 1614.068012] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.075332] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52984cce-8da2-def1-5eb6-3d95613d8fa6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.151499] env[63241]: DEBUG nova.compute.utils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1614.152991] env[63241]: DEBUG nova.compute.manager [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1614.153216] env[63241]: DEBUG nova.network.neutron [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1614.192065] env[63241]: DEBUG nova.policy [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd7ceedc3c0d4420caedd242362d6e018', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '070783f39a14481698cc0477a60552b3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1614.293560] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.293755] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.293862] env[63241]: DEBUG nova.network.neutron [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1614.441717] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3891e3e-b265-4fcc-beaa-31928f4763b3 req-b4c61bf7-cc2d-4217-ba50-62b7396ebc4f service nova] Releasing lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.452114] env[63241]: DEBUG nova.network.neutron [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Successfully created port: 8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1614.581068] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52984cce-8da2-def1-5eb6-3d95613d8fa6, 'name': SearchDatastore_Task, 'duration_secs': 0.008238} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.581924] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2e41e3b-2d78-45e4-9144-d173d0affc20 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.587303] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1614.587303] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521a3afb-9268-41c8-f3d8-d948d5709c87" [ 1614.587303] env[63241]: _type = "Task" [ 1614.587303] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.597026] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521a3afb-9268-41c8-f3d8-d948d5709c87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.662886] env[63241]: DEBUG nova.compute.manager [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1614.841510] env[63241]: DEBUG nova.network.neutron [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1615.003124] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8816d2e7-7fa7-46c2-b3c5-77efce09a134 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.011095] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81686ab2-7439-433a-8da2-62b354c55665 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.041778] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146aaf6f-1c31-41fc-b492-8e1cf7a8aeb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.048830] env[63241]: DEBUG nova.network.neutron [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.051383] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499fb698-ce7c-4dda-aa7e-5545f75f08b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.065791] env[63241]: DEBUG nova.compute.provider_tree [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1615.096868] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521a3afb-9268-41c8-f3d8-d948d5709c87, 'name': SearchDatastore_Task, 'duration_secs': 0.008932} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.097243] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.097534] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 31e84206-e583-4610-969e-2ccae2d0b206/31e84206-e583-4610-969e-2ccae2d0b206.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1615.097802] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c21dbe85-cb35-432f-bbbc-2bcc4666ea3f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.105258] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1615.105258] env[63241]: value = "task-1820498" [ 1615.105258] env[63241]: _type = "Task" [ 1615.105258] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.113255] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820498, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.249731] env[63241]: DEBUG nova.compute.manager [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-vif-plugged-10657b5b-6750-4389-b802-7e6bee8963e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1615.250016] env[63241]: DEBUG oslo_concurrency.lockutils [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.250237] env[63241]: DEBUG oslo_concurrency.lockutils [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] Lock "9d301157-6870-4452-9ae6-0d45c4338886-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.250406] env[63241]: DEBUG oslo_concurrency.lockutils [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] Lock "9d301157-6870-4452-9ae6-0d45c4338886-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.250574] env[63241]: DEBUG nova.compute.manager [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] No waiting events found dispatching network-vif-plugged-10657b5b-6750-4389-b802-7e6bee8963e7 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1615.250737] env[63241]: WARNING nova.compute.manager [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received unexpected event network-vif-plugged-10657b5b-6750-4389-b802-7e6bee8963e7 for instance with vm_state building and task_state spawning. [ 1615.250909] env[63241]: DEBUG nova.compute.manager [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-changed-10657b5b-6750-4389-b802-7e6bee8963e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1615.251300] env[63241]: DEBUG nova.compute.manager [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Refreshing instance network info cache due to event network-changed-10657b5b-6750-4389-b802-7e6bee8963e7. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1615.251378] env[63241]: DEBUG oslo_concurrency.lockutils [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] Acquiring lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.557062] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.557432] env[63241]: DEBUG nova.compute.manager [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Instance network_info: |[{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1615.557854] env[63241]: DEBUG oslo_concurrency.lockutils [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] Acquired lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.558095] env[63241]: DEBUG nova.network.neutron [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Refreshing network info cache for port 10657b5b-6750-4389-b802-7e6bee8963e7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1615.559571] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:10:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'10657b5b-6750-4389-b802-7e6bee8963e7', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1615.567419] env[63241]: DEBUG oslo.service.loopingcall [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1615.570605] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1615.571414] env[63241]: DEBUG nova.scheduler.client.report [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1615.574750] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3448e4d-b8aa-493a-aa7e-3783d43b34fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.595365] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1615.595365] env[63241]: value = "task-1820499" [ 1615.595365] env[63241]: _type = "Task" [ 1615.595365] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.606208] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820499, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.614322] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820498, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460442} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.614535] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 31e84206-e583-4610-969e-2ccae2d0b206/31e84206-e583-4610-969e-2ccae2d0b206.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1615.614741] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1615.614973] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c686bdd7-183c-4679-9bb5-f5594db4685c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.620335] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1615.620335] env[63241]: value = "task-1820500" [ 1615.620335] env[63241]: _type = "Task" [ 1615.620335] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.630016] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820500, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.672391] env[63241]: DEBUG nova.compute.manager [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1615.699499] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1615.699750] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1615.699975] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1615.700195] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1615.700346] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1615.700493] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1615.700702] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1615.700858] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1615.701036] env[63241]: DEBUG 
nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1615.701203] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1615.701392] env[63241]: DEBUG nova.virt.hardware [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1615.702292] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b05e2b-856e-49ef-83ec-456943b5e76a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.715112] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb2b863-8bdb-47ae-bf7a-1a9e3b7669a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.881230] env[63241]: DEBUG nova.compute.manager [req-594f6f79-b4c7-40db-a4aa-142e9e05d236 req-88d5c1d4-2143-4c99-806b-abc8c38a5765 service nova] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Received event network-vif-plugged-8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1615.881230] env[63241]: DEBUG oslo_concurrency.lockutils [req-594f6f79-b4c7-40db-a4aa-142e9e05d236 req-88d5c1d4-2143-4c99-806b-abc8c38a5765 service nova] Acquiring lock "cfdc6b34-6940-414f-b17d-6fe17f92474a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.881230] env[63241]: DEBUG oslo_concurrency.lockutils [req-594f6f79-b4c7-40db-a4aa-142e9e05d236 req-88d5c1d4-2143-4c99-806b-abc8c38a5765 service nova] Lock "cfdc6b34-6940-414f-b17d-6fe17f92474a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.881230] env[63241]: DEBUG oslo_concurrency.lockutils [req-594f6f79-b4c7-40db-a4aa-142e9e05d236 req-88d5c1d4-2143-4c99-806b-abc8c38a5765 service nova] Lock "cfdc6b34-6940-414f-b17d-6fe17f92474a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.881230] env[63241]: DEBUG nova.compute.manager [req-594f6f79-b4c7-40db-a4aa-142e9e05d236 req-88d5c1d4-2143-4c99-806b-abc8c38a5765 service nova] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] No waiting events found dispatching network-vif-plugged-8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1615.881230] env[63241]: WARNING nova.compute.manager 
[req-594f6f79-b4c7-40db-a4aa-142e9e05d236 req-88d5c1d4-2143-4c99-806b-abc8c38a5765 service nova] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Received unexpected event network-vif-plugged-8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d for instance with vm_state building and task_state spawning. [ 1615.953377] env[63241]: DEBUG nova.network.neutron [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updated VIF entry in instance network info cache for port 10657b5b-6750-4389-b802-7e6bee8963e7. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1615.953921] env[63241]: DEBUG nova.network.neutron [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.959369] env[63241]: DEBUG nova.network.neutron [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Successfully updated port: 8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1616.075809] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.076369] env[63241]: DEBUG nova.compute.manager [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1616.078923] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.550s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.079384] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.081109] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.808s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.082532] env[63241]: INFO nova.compute.claims [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1616.107715] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820499, 'name': CreateVM_Task, 'duration_secs': 0.432996} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.108852] env[63241]: INFO nova.scheduler.client.report [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Deleted allocations for instance 943100f1-e702-4869-8c19-d81d39712ac5 [ 1616.109940] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1616.110757] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.110891] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.111287] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1616.111570] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e23ade9-e8f3-434f-ac12-c10b14eef2da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.119467] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1616.119467] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526d19bd-aff3-d980-80e7-c8df02035c1f" [ 1616.119467] env[63241]: _type = "Task" [ 1616.119467] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.132344] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526d19bd-aff3-d980-80e7-c8df02035c1f, 'name': SearchDatastore_Task, 'duration_secs': 0.010389} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.135194] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.135422] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1616.135656] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.135897] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.135973] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1616.136275] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069598} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.136471] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a2234f8-66a1-4661-9b29-d11f55006508 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.138232] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1616.139236] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149f16db-f619-4d49-973c-eb790c0725ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.161402] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 31e84206-e583-4610-969e-2ccae2d0b206/31e84206-e583-4610-969e-2ccae2d0b206.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1616.163341] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30c6aa46-a291-4f7e-8b57-8d0fa28e0593 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.177114] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1616.177257] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1616.178984] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a474de15-42d8-44ae-bacf-b444b57bf4dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.184783] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1616.184783] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c670dc-fc88-86d6-d135-3684d7abb359" [ 1616.184783] env[63241]: _type = "Task" [ 1616.184783] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.186310] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1616.186310] env[63241]: value = "task-1820501" [ 1616.186310] env[63241]: _type = "Task" [ 1616.186310] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.197060] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c670dc-fc88-86d6-d135-3684d7abb359, 'name': SearchDatastore_Task, 'duration_secs': 0.008743} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.200535] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820501, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.200760] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-886782f3-7fdc-44ed-bb18-12a65e879901 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.206070] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1616.206070] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526c83fd-80ff-c802-2f5a-54c0572ddc05" [ 1616.206070] env[63241]: _type = "Task" [ 1616.206070] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.215583] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526c83fd-80ff-c802-2f5a-54c0572ddc05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.460038] env[63241]: DEBUG oslo_concurrency.lockutils [req-989232c8-ee5f-4b68-8ae2-aa5e078cd1a6 req-6a95fb8c-3c43-404f-be1c-547db3c4ae03 service nova] Releasing lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.462800] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquiring lock "refresh_cache-cfdc6b34-6940-414f-b17d-6fe17f92474a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.462800] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquired lock "refresh_cache-cfdc6b34-6940-414f-b17d-6fe17f92474a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.462800] env[63241]: DEBUG nova.network.neutron [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1616.587199] env[63241]: DEBUG nova.compute.utils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1616.591069] env[63241]: DEBUG nova.compute.manager [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1616.591146] env[63241]: DEBUG nova.network.neutron [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1616.620875] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dbd45a21-58ef-4234-978f-78c7e3a10aa6 tempest-ServersTestMultiNic-1918609820 tempest-ServersTestMultiNic-1918609820-project-member] Lock "943100f1-e702-4869-8c19-d81d39712ac5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.517s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.640188] env[63241]: DEBUG nova.policy [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f691b52644488c832ce1224a079218', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e48fc59560ab47ae87be73ab11b13e7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1616.699609] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820501, 'name': ReconfigVM_Task, 'duration_secs': 0.255646} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.699901] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 31e84206-e583-4610-969e-2ccae2d0b206/31e84206-e583-4610-969e-2ccae2d0b206.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1616.700593] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f607467-2db9-4816-b99c-cccebf62530d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.707010] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1616.707010] env[63241]: value = "task-1820502" [ 1616.707010] env[63241]: _type = "Task" [ 1616.707010] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.719959] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820502, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.723061] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526c83fd-80ff-c802-2f5a-54c0572ddc05, 'name': SearchDatastore_Task, 'duration_secs': 0.009811} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.723856] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.723856] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 9d301157-6870-4452-9ae6-0d45c4338886/9d301157-6870-4452-9ae6-0d45c4338886.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1616.723856] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e529f83b-bf3d-48d1-a082-18e5b8e37621 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.734791] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1616.734791] env[63241]: value = "task-1820503" [ 1616.734791] env[63241]: _type = "Task" [ 1616.734791] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.742717] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820503, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.902681] env[63241]: DEBUG nova.network.neutron [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Successfully created port: 89afe680-269b-4e65-8447-6e663afc4dca {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1617.051133] env[63241]: DEBUG nova.network.neutron [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1617.092377] env[63241]: DEBUG nova.compute.manager [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1617.222982] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820502, 'name': Rename_Task, 'duration_secs': 0.134506} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.224892] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1617.227541] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23296e31-de39-4ca6-be3a-22ca24ab4483 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.236543] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1617.236543] env[63241]: value = "task-1820504" [ 1617.236543] env[63241]: _type = "Task" [ 1617.236543] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.244915] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820503, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473241} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.245615] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 9d301157-6870-4452-9ae6-0d45c4338886/9d301157-6870-4452-9ae6-0d45c4338886.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1617.245838] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1617.247436] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1c36a2a-9a6e-4738-8ee5-ebd913d79a36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.253017] env[63241]: DEBUG oslo_vmware.api [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820504, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.257943] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1617.257943] env[63241]: value = "task-1820505" [ 1617.257943] env[63241]: _type = "Task" [ 1617.257943] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.270875] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820505, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.491206] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06e3871-8ce6-4cd6-b248-0ea8b7fe44f4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.501319] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416e54f0-7a7f-4f91-8c08-bbc6f4fe648c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.539603] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5b5390-3e67-41e8-87b4-44b530cc7121 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.547527] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c660630d-efc9-4e51-8b51-7657a8a65257 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.563888] env[63241]: DEBUG nova.compute.provider_tree [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1617.572466] env[63241]: DEBUG nova.network.neutron [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Updating instance_info_cache with network_info: [{"id": "8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d", "address": "fa:16:3e:13:73:2f", "network": {"id": "360cc5e1-f9c6-40ff-9ce8-cb34a24f0a63", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-368606810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "070783f39a14481698cc0477a60552b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d4032b3-2e", "ovs_interfaceid": "8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.746586] env[63241]: DEBUG oslo_vmware.api 
[None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820504, 'name': PowerOnVM_Task, 'duration_secs': 0.441089} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.746845] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1617.747054] env[63241]: INFO nova.compute.manager [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Took 6.98 seconds to spawn the instance on the hypervisor. [ 1617.747240] env[63241]: DEBUG nova.compute.manager [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1617.748010] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edaf491c-2c58-4629-8d31-fc72acd9ef93 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.769825] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061743} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.770113] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1617.770925] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b206bc1f-a23f-40f8-a255-ebb286b00ed5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.792608] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 9d301157-6870-4452-9ae6-0d45c4338886/9d301157-6870-4452-9ae6-0d45c4338886.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1617.794165] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a34ac04b-2ca1-4807-b523-d6f98388c64d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.814909] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1617.814909] env[63241]: value = "task-1820506" [ 1617.814909] env[63241]: _type = "Task" [ 1617.814909] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.822866] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820506, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.907808] env[63241]: DEBUG nova.compute.manager [req-ef06fa15-8537-46a4-9b72-4860ed2d50d3 req-0830b25a-2a2f-4065-9fbe-b49ad5fe8601 service nova] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Received event network-changed-8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1617.908032] env[63241]: DEBUG nova.compute.manager [req-ef06fa15-8537-46a4-9b72-4860ed2d50d3 req-0830b25a-2a2f-4065-9fbe-b49ad5fe8601 service nova] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Refreshing instance network info cache due to event network-changed-8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1617.908237] env[63241]: DEBUG oslo_concurrency.lockutils [req-ef06fa15-8537-46a4-9b72-4860ed2d50d3 req-0830b25a-2a2f-4065-9fbe-b49ad5fe8601 service nova] Acquiring lock "refresh_cache-cfdc6b34-6940-414f-b17d-6fe17f92474a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.077088] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Releasing lock "refresh_cache-cfdc6b34-6940-414f-b17d-6fe17f92474a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.077088] env[63241]: DEBUG nova.compute.manager [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Instance network_info: |[{"id": "8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d", "address": "fa:16:3e:13:73:2f", "network": {"id": "360cc5e1-f9c6-40ff-9ce8-cb34a24f0a63", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-368606810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "070783f39a14481698cc0477a60552b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d4032b3-2e", "ovs_interfaceid": "8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1618.077088] env[63241]: DEBUG oslo_concurrency.lockutils [req-ef06fa15-8537-46a4-9b72-4860ed2d50d3 req-0830b25a-2a2f-4065-9fbe-b49ad5fe8601 service nova] Acquired lock "refresh_cache-cfdc6b34-6940-414f-b17d-6fe17f92474a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.077088] env[63241]: DEBUG nova.network.neutron [req-ef06fa15-8537-46a4-9b72-4860ed2d50d3 req-0830b25a-2a2f-4065-9fbe-b49ad5fe8601 service nova] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Refreshing network info cache for port 8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1618.077361] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:73:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1618.085029] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Creating folder: Project (070783f39a14481698cc0477a60552b3). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1618.085842] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-726bf7cb-a425-45e9-a249-52066d5aab4c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.096544] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Created folder: Project (070783f39a14481698cc0477a60552b3) in parent group-v376927. [ 1618.096771] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Creating folder: Instances. Parent ref: group-v377104. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1618.097512] env[63241]: DEBUG nova.scheduler.client.report [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1618.097746] env[63241]: DEBUG nova.compute.provider_tree [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 93 to 94 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1618.097924] env[63241]: DEBUG nova.compute.provider_tree [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1618.101214] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7e0df64-2188-4af5-8d57-e1090f8a0637 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.103647] env[63241]: DEBUG nova.compute.manager [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1618.114257] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Created folder: Instances in parent group-v377104. [ 1618.114498] env[63241]: DEBUG oslo.service.loopingcall [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1618.115292] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1618.115447] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c7f82da-4ffa-4225-9c76-1e7fb712aaf2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.137975] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='5d9e02c6305ae2e0ef311f998fb9b831',container_format='bare',created_at=2024-12-12T15:26:50Z,direct_url=,disk_format='vmdk',id=329bc4d0-0ed9-4ffe-a843-80beee7f7bfa,min_disk=1,min_ram=0,name='tempest-test-snap-265910963',owner='e48fc59560ab47ae87be73ab11b13e7c',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-12T15:27:06Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1618.138263] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1618.138440] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1618.138633] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1618.138821] env[63241]: DEBUG nova.virt.hardware [None 
req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1618.138942] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1618.139182] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1618.139394] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1618.139583] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1618.139787] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1618.140022] env[63241]: DEBUG nova.virt.hardware [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1618.141156] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1135951f-869b-4ab7-aa6e-e0884861cf20 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.145103] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1618.145103] env[63241]: value = "task-1820509" [ 1618.145103] env[63241]: _type = "Task" [ 1618.145103] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.151995] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02ba8f3-0839-4b2b-9347-6edca6630f75 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.158918] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820509, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.263959] env[63241]: INFO nova.compute.manager [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Took 58.63 seconds to build instance. [ 1618.327404] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820506, 'name': ReconfigVM_Task, 'duration_secs': 0.285267} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.327695] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 9d301157-6870-4452-9ae6-0d45c4338886/9d301157-6870-4452-9ae6-0d45c4338886.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1618.328418] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5975d289-3264-4408-8539-a6feabbe7f54 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.334434] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1618.334434] env[63241]: value = "task-1820510" [ 1618.334434] env[63241]: _type = "Task" [ 1618.334434] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.345590] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820510, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.549027] env[63241]: DEBUG nova.network.neutron [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Successfully updated port: 89afe680-269b-4e65-8447-6e663afc4dca {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1618.604068] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.604604] env[63241]: DEBUG nova.compute.manager [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1618.610021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.981s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.610021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.610021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.383s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.610021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.612333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.869s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.614347] env[63241]: INFO nova.compute.claims [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1618.647582] env[63241]: INFO nova.scheduler.client.report [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Deleted allocations for instance a534b054-2143-41c4-a0fa-028339ecdbbf [ 1618.651844] env[63241]: INFO nova.scheduler.client.report [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Deleted allocations for instance f4eb5e49-bae0-435c-93f0-15d6939f9e7c [ 1618.672900] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820509, 'name': CreateVM_Task, 'duration_secs': 0.43467} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.672900] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1618.673564] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.673735] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.674074] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1618.674339] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-755e8bc1-9635-48bf-adda-96bc69216cf8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.680179] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1618.680179] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c06749-7575-60c1-4afb-567b464afae0" [ 1618.680179] env[63241]: _type = "Task" [ 1618.680179] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.689238] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c06749-7575-60c1-4afb-567b464afae0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.766509] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8f4db41f-914c-49ce-939e-cc8793e9a1c5 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "31e84206-e583-4610-969e-2ccae2d0b206" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.153s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.847266] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820510, 'name': Rename_Task, 'duration_secs': 0.201079} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.847266] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1618.847266] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85181b9e-d049-486a-8b56-75a7845f61b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.851950] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1618.851950] env[63241]: value = "task-1820511" [ 1618.851950] env[63241]: _type = "Task" [ 1618.851950] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.860498] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820511, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.053680] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "refresh_cache-f583adda-976e-4f79-adc7-0b4e1a73ad73" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.053680] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "refresh_cache-f583adda-976e-4f79-adc7-0b4e1a73ad73" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.053680] env[63241]: DEBUG nova.network.neutron [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1619.080515] env[63241]: DEBUG nova.network.neutron [req-ef06fa15-8537-46a4-9b72-4860ed2d50d3 req-0830b25a-2a2f-4065-9fbe-b49ad5fe8601 service nova] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Updated VIF entry in instance network info cache for port 8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1619.080906] env[63241]: DEBUG nova.network.neutron [req-ef06fa15-8537-46a4-9b72-4860ed2d50d3 req-0830b25a-2a2f-4065-9fbe-b49ad5fe8601 service nova] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Updating instance_info_cache with network_info: [{"id": "8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d", "address": "fa:16:3e:13:73:2f", "network": {"id": "360cc5e1-f9c6-40ff-9ce8-cb34a24f0a63", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-368606810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "070783f39a14481698cc0477a60552b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d4032b3-2e", "ovs_interfaceid": "8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.124909] env[63241]: DEBUG nova.compute.utils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1619.128742] env[63241]: DEBUG nova.compute.manager [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 
tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1619.128742] env[63241]: DEBUG nova.network.neutron [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1619.164072] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1a36c243-b95a-4022-b3ff-40ea13194d7c tempest-ImagesOneServerTestJSON-882626409 tempest-ImagesOneServerTestJSON-882626409-project-member] Lock "a534b054-2143-41c4-a0fa-028339ecdbbf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.482s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.169149] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8815705-3fdc-4736-8d5f-4e63fb9b6068 tempest-InstanceActionsV221TestJSON-844352268 tempest-InstanceActionsV221TestJSON-844352268-project-member] Lock "f4eb5e49-bae0-435c-93f0-15d6939f9e7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.014s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.195038] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c06749-7575-60c1-4afb-567b464afae0, 'name': SearchDatastore_Task, 'duration_secs': 0.009924} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.195622] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.196231] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1619.199023] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.199023] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.199023] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1619.199023] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12e6783d-5877-4d3b-ac46-19fb2cfc152d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.208261] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1619.208781] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1619.209630] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa97f244-ce08-4149-9830-f016922b2a34 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.215536] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1619.215536] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f8c759-8740-bb55-8755-ea14c0e37f9f" [ 1619.215536] env[63241]: _type = "Task" [ 1619.215536] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.224455] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f8c759-8740-bb55-8755-ea14c0e37f9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.265782] env[63241]: DEBUG nova.policy [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f52bf5741a490c83e01e06f686559e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c64d07a686b414f93ec4c599307498f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1619.363101] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820511, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.584559] env[63241]: DEBUG oslo_concurrency.lockutils [req-ef06fa15-8537-46a4-9b72-4860ed2d50d3 req-0830b25a-2a2f-4065-9fbe-b49ad5fe8601 service nova] Releasing lock "refresh_cache-cfdc6b34-6940-414f-b17d-6fe17f92474a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.628114] env[63241]: DEBUG nova.compute.manager [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1619.639622] env[63241]: DEBUG nova.network.neutron [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1619.730875] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f8c759-8740-bb55-8755-ea14c0e37f9f, 'name': SearchDatastore_Task, 'duration_secs': 0.010522} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.734840] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5c8a1d8-a594-497b-ad83-c69ac186b379 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.744447] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1619.744447] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526c0d53-1477-44e8-15a3-1efe7f40082e" [ 1619.744447] env[63241]: _type = "Task" [ 1619.744447] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.754109] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526c0d53-1477-44e8-15a3-1efe7f40082e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.876121] env[63241]: DEBUG oslo_vmware.api [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820511, 'name': PowerOnVM_Task, 'duration_secs': 0.518039} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.876414] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1619.876620] env[63241]: INFO nova.compute.manager [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Took 6.65 seconds to spawn the instance on the hypervisor. 
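Aside: the PowerOnVM_Task, CreateVM_Task and SearchDatastore_Task records above all follow the same invoke-then-poll pattern from oslo.vmware, which is what produces the repeated "Waiting for the task" / "Task: {...} progress is N%" lines. Below is a minimal sketch of that pattern (not Nova's actual driver code), assuming a reachable vCenter and the oslo.vmware library; the endpoint, credentials and the 'vm-12345' moref value are hypothetical placeholders, not values taken from this log.

```python
# Sketch only: illustrates the oslo.vmware invoke/poll pattern seen in the log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Creating the session logs in to vCenter (cf. the SessionManager.Login record in this log).
session = vmware_api.VMwareAPISession(
    'vc.example.test',        # hypothetical vCenter endpoint
    'user', 'secret',         # hypothetical credentials
    api_retry_count=3,
    task_poll_interval=0.5,   # interval between the "progress is N%" polls
)

# Build a managed object reference for the VM; 'vm-12345' is a placeholder id.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# invoke_api() issues the SOAP call and returns a task reference;
# wait_for_task() polls the task until it reaches 'success' (or raises on error).
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task_ref)
print(task_info.state)  # expected to be 'success' once the power-on completes
```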
[ 1619.876902] env[63241]: DEBUG nova.compute.manager [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1619.877622] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b012000-8c99-4582-8b0f-8d78d780a9fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.960887] env[63241]: DEBUG nova.network.neutron [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Successfully created port: 8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1620.032276] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f81015-6f83-44e2-b76c-7b2569a4edcf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.038304] env[63241]: DEBUG nova.network.neutron [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Updating instance_info_cache with network_info: [{"id": "89afe680-269b-4e65-8447-6e663afc4dca", "address": "fa:16:3e:c8:9e:4a", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89afe680-26", "ovs_interfaceid": "89afe680-269b-4e65-8447-6e663afc4dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.043237] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328907da-7607-4344-b56a-023c73f52021 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.081301] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83dbba5-3a70-4cc0-8357-6fa25319bb5d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.091852] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc089df9-356b-4981-b44d-65c984f9ec3d 
{{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.108304] env[63241]: DEBUG nova.compute.provider_tree [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1620.117575] env[63241]: DEBUG nova.compute.manager [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Received event network-vif-plugged-89afe680-269b-4e65-8447-6e663afc4dca {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1620.118215] env[63241]: DEBUG oslo_concurrency.lockutils [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] Acquiring lock "f583adda-976e-4f79-adc7-0b4e1a73ad73-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.118215] env[63241]: DEBUG oslo_concurrency.lockutils [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] Lock "f583adda-976e-4f79-adc7-0b4e1a73ad73-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.118465] env[63241]: DEBUG oslo_concurrency.lockutils [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] Lock "f583adda-976e-4f79-adc7-0b4e1a73ad73-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.118724] env[63241]: DEBUG nova.compute.manager [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] No waiting events found dispatching network-vif-plugged-89afe680-269b-4e65-8447-6e663afc4dca {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1620.118850] env[63241]: WARNING nova.compute.manager [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Received unexpected event network-vif-plugged-89afe680-269b-4e65-8447-6e663afc4dca for instance with vm_state building and task_state spawning. [ 1620.118936] env[63241]: DEBUG nova.compute.manager [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Received event network-changed-89afe680-269b-4e65-8447-6e663afc4dca {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1620.119169] env[63241]: DEBUG nova.compute.manager [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Refreshing instance network info cache due to event network-changed-89afe680-269b-4e65-8447-6e663afc4dca. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1620.119373] env[63241]: DEBUG oslo_concurrency.lockutils [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] Acquiring lock "refresh_cache-f583adda-976e-4f79-adc7-0b4e1a73ad73" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.260787] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526c0d53-1477-44e8-15a3-1efe7f40082e, 'name': SearchDatastore_Task, 'duration_secs': 0.009866} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.261186] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.261383] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] cfdc6b34-6940-414f-b17d-6fe17f92474a/cfdc6b34-6940-414f-b17d-6fe17f92474a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1620.261677] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-773e87d5-8a8e-4d3c-a70e-1880c61f86e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.270443] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1620.270443] env[63241]: value = "task-1820512" [ 1620.270443] env[63241]: _type = "Task" [ 1620.270443] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.279013] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820512, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.400545] env[63241]: INFO nova.compute.manager [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Took 60.39 seconds to build instance. 
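Aside: the "Acquiring lock", "acquired ... waited Ns" and ""released" ... held Ns" DEBUG lines throughout this section (for example the "refresh_cache-<uuid>" and "compute_resources" locks) are emitted by oslo.concurrency's lockutils helpers. A small illustrative sketch of the two forms involved, assuming only oslo.concurrency; the function names and the sleeps are hypothetical stand-ins for the cache refresh and resource-tracker work, and DEBUG logging must be enabled to see the lock messages.

```python
# Sketch only: the lockutils patterns behind the lock DEBUG lines in this log.
import time
from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid):
    # Context-manager form: matches the per-instance "refresh_cache-<uuid>" lock
    # acquired and released around network info cache updates.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        time.sleep(0.1)  # stand-in for rebuilding the instance_info_cache

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Decorator form: a single shared lock serialises callers, which is why the log
    # shows long "waited" times on "compute_resources" before each claim/usage update.
    time.sleep(0.1)  # stand-in for the resource-tracker work

if __name__ == '__main__':
    refresh_network_cache('cfdc6b34-6940-414f-b17d-6fe17f92474a')
    claim_resources('1626092d-78ef-41b5-8b47-fb840d63e4f4')
```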
[ 1620.545068] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "refresh_cache-f583adda-976e-4f79-adc7-0b4e1a73ad73" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.545068] env[63241]: DEBUG nova.compute.manager [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Instance network_info: |[{"id": "89afe680-269b-4e65-8447-6e663afc4dca", "address": "fa:16:3e:c8:9e:4a", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89afe680-26", "ovs_interfaceid": "89afe680-269b-4e65-8447-6e663afc4dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1620.545068] env[63241]: DEBUG oslo_concurrency.lockutils [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] Acquired lock "refresh_cache-f583adda-976e-4f79-adc7-0b4e1a73ad73" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.545068] env[63241]: DEBUG nova.network.neutron [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Refreshing network info cache for port 89afe680-269b-4e65-8447-6e663afc4dca {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1620.547282] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:9e:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89afe680-269b-4e65-8447-6e663afc4dca', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1620.559042] env[63241]: DEBUG oslo.service.loopingcall [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1620.560389] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1620.560734] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38945a2a-dc05-464f-b22d-af9057b62cf5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.593681] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1620.593681] env[63241]: value = "task-1820513" [ 1620.593681] env[63241]: _type = "Task" [ 1620.593681] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.604956] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820513, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.611622] env[63241]: DEBUG nova.scheduler.client.report [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1620.647452] env[63241]: DEBUG nova.compute.manager [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1620.683533] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1620.683859] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1620.684792] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1620.684792] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1620.684957] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1620.685171] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1620.685420] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1620.685594] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1620.685820] env[63241]: DEBUG nova.virt.hardware [None 
req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1620.686267] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1620.686533] env[63241]: DEBUG nova.virt.hardware [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1620.687806] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab81e80-1cec-4d15-81ff-c12292186c8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.699272] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171f2b7d-60b5-4106-a53b-b4c76ad43b91 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.783048] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820512, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509151} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.783339] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] cfdc6b34-6940-414f-b17d-6fe17f92474a/cfdc6b34-6940-414f-b17d-6fe17f92474a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1620.783591] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1620.783852] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a81b9402-e8bf-4575-8a81-96ea01068389 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.790495] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1620.790495] env[63241]: value = "task-1820514" [ 1620.790495] env[63241]: _type = "Task" [ 1620.790495] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.799082] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820514, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.902450] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fe56a416-11e1-49fc-8cf6-6443c64dc6eb tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "9d301157-6870-4452-9ae6-0d45c4338886" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.902s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.105781] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820513, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.116959] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.504s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.117531] env[63241]: DEBUG nova.compute.manager [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1621.120419] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.992s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.121133] env[63241]: DEBUG nova.objects.instance [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1621.301521] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820514, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146575} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.301890] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1621.303247] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4854b5d6-96ee-41fc-9193-a9e3d4133957 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.329121] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] cfdc6b34-6940-414f-b17d-6fe17f92474a/cfdc6b34-6940-414f-b17d-6fe17f92474a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1621.329632] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-994063fd-4570-4646-8539-413610502887 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.356837] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1621.356837] env[63241]: value = "task-1820515" [ 1621.356837] env[63241]: _type = "Task" [ 1621.356837] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.365886] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820515, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.607047] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820513, 'name': CreateVM_Task, 'duration_secs': 0.538779} completed successfully. 
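[editor's note] The task entries around here (task-1820514 ExtendVirtualDisk_Task, task-1820513 CreateVM_Task) follow the same shape: submit a vCenter task, then poll until it reports progress 100% / "completed successfully". The sketch below is a generic poll-until-done loop, not the oslo.vmware implementation; `poll_progress` is a hypothetical callable standing in for the real task query.

```python
# Illustrative sketch of the wait_for_task/_poll_task pattern recorded above.
# poll_progress() is assumed to return (state, progress_percent).

import time

def wait_for_task(poll_progress, interval=0.5, timeout=300):
    """Poll a long-running task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll_progress()
        print(f"progress is {progress}%")      # mirrors the DEBUG lines in the log
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")
```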
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.607196] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1621.607904] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.608135] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.608473] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1621.609336] env[63241]: DEBUG nova.network.neutron [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Updated VIF entry in instance network info cache for port 89afe680-269b-4e65-8447-6e663afc4dca. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1621.609948] env[63241]: DEBUG nova.network.neutron [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Updating instance_info_cache with network_info: [{"id": "89afe680-269b-4e65-8447-6e663afc4dca", "address": "fa:16:3e:c8:9e:4a", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89afe680-26", "ovs_interfaceid": "89afe680-269b-4e65-8447-6e663afc4dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.610836] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40545229-1ed3-448e-8f8c-781cb745f20c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.616496] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1621.616496] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52872b60-27aa-597b-6c5b-78bf58aa891a" [ 1621.616496] env[63241]: _type = "Task" [ 1621.616496] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.624184] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52872b60-27aa-597b-6c5b-78bf58aa891a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.631908] env[63241]: DEBUG nova.compute.utils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1621.635208] env[63241]: DEBUG nova.compute.manager [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Allocating IP information in the background. 
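[editor's note] The "Updating instance_info_cache with network_info" entry above logs the full per-VIF structure: port id, MAC address, and a nested network/subnets/ips layout. The snippet below only shows how the fields of interest can be pulled out of data shaped like that log entry; `summarize_network_info` is a hypothetical helper, not part of Nova.

```python
# Illustrative sketch: extract (port id, MAC, fixed IPs) from a network_info
# list shaped like the instance_info_cache entry logged above.

def summarize_network_info(network_info):
    """Yield (port_id, mac_address, [fixed ip addresses]) per VIF."""
    for vif in network_info:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"
        ]
        yield vif["id"], vif["address"], fixed_ips

# For the entry above this yields:
# ('89afe680-269b-4e65-8447-6e663afc4dca', 'fa:16:3e:c8:9e:4a', ['192.168.128.9'])
```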
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1621.635208] env[63241]: DEBUG nova.network.neutron [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1621.738068] env[63241]: DEBUG nova.policy [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e74fed52b794bb397452b4aeb201c07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa6725d063734b7183d5a1ef4106ef60', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1621.870946] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820515, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.114450] env[63241]: DEBUG oslo_concurrency.lockutils [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] Releasing lock "refresh_cache-f583adda-976e-4f79-adc7-0b4e1a73ad73" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.114786] env[63241]: DEBUG nova.compute.manager [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Received event network-changed-6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1622.114960] env[63241]: DEBUG nova.compute.manager [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Refreshing instance network info cache due to event network-changed-6bc11935-f0d5-456c-b815-ea415689a621. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1622.115194] env[63241]: DEBUG oslo_concurrency.lockutils [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] Acquiring lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.115339] env[63241]: DEBUG oslo_concurrency.lockutils [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] Acquired lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.115499] env[63241]: DEBUG nova.network.neutron [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Refreshing network info cache for port 6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1622.134555] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ccfa560c-d21f-4d56-8620-1b39f3a07791 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.135761] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.136052] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Processing image 329bc4d0-0ed9-4ffe-a843-80beee7f7bfa {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1622.136375] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.136611] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.136900] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1622.137451] env[63241]: DEBUG 
oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.639s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.139170] env[63241]: INFO nova.compute.claims [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1622.143331] env[63241]: DEBUG nova.compute.manager [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1622.149420] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09875876-271c-4b10-9d9d-28f9dc579fc3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.166105] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1622.166374] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1622.167493] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0166dd1a-384c-4ac0-8101-87ccf0f11ef2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.174872] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1622.174872] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5238ab9b-8658-8703-ad20-be6d3f769011" [ 1622.174872] env[63241]: _type = "Task" [ 1622.174872] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.185704] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5238ab9b-8658-8703-ad20-be6d3f769011, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.265088] env[63241]: DEBUG nova.compute.manager [req-f10e94cb-1e37-476e-a09f-73f77eb1e982 req-0cf067df-9c8b-489a-88c6-89669b5946b1 service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Received event network-vif-plugged-8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1622.265088] env[63241]: DEBUG oslo_concurrency.lockutils [req-f10e94cb-1e37-476e-a09f-73f77eb1e982 req-0cf067df-9c8b-489a-88c6-89669b5946b1 service nova] Acquiring lock "1626092d-78ef-41b5-8b47-fb840d63e4f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.265088] env[63241]: DEBUG oslo_concurrency.lockutils [req-f10e94cb-1e37-476e-a09f-73f77eb1e982 req-0cf067df-9c8b-489a-88c6-89669b5946b1 service nova] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.266856] env[63241]: DEBUG oslo_concurrency.lockutils [req-f10e94cb-1e37-476e-a09f-73f77eb1e982 req-0cf067df-9c8b-489a-88c6-89669b5946b1 service nova] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.267232] env[63241]: DEBUG nova.compute.manager [req-f10e94cb-1e37-476e-a09f-73f77eb1e982 req-0cf067df-9c8b-489a-88c6-89669b5946b1 service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] No waiting events found dispatching network-vif-plugged-8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1622.267426] env[63241]: WARNING nova.compute.manager [req-f10e94cb-1e37-476e-a09f-73f77eb1e982 req-0cf067df-9c8b-489a-88c6-89669b5946b1 service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Received unexpected event network-vif-plugged-8754391e-9ab8-421b-995a-d10260d260c6 for instance with vm_state building and task_state spawning. [ 1622.367311] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820515, 'name': ReconfigVM_Task, 'duration_secs': 0.838156} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.368181] env[63241]: DEBUG nova.network.neutron [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Successfully created port: 871c0af7-a67d-4b89-b5aa-bc46ce52b6b5 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1622.370161] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Reconfigured VM instance instance-0000003f to attach disk [datastore1] cfdc6b34-6940-414f-b17d-6fe17f92474a/cfdc6b34-6940-414f-b17d-6fe17f92474a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1622.370780] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d40bafc-0312-4e9f-bde8-88ef1f569263 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.377470] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1622.377470] env[63241]: value = "task-1820516" [ 1622.377470] env[63241]: _type = "Task" [ 1622.377470] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.392581] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820516, 'name': Rename_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.404532] env[63241]: DEBUG nova.network.neutron [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Successfully updated port: 8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1622.687754] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Preparing fetch location {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1622.688073] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Fetch image to [datastore1] OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9/OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9.vmdk {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1622.688276] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Downloading stream optimized image 329bc4d0-0ed9-4ffe-a843-80beee7f7bfa to [datastore1] OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9/OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9.vmdk on the data store datastore1 as vApp {{(pid=63241) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1622.688482] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Downloading image file data 329bc4d0-0ed9-4ffe-a843-80beee7f7bfa to the ESX as VM named 'OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9' {{(pid=63241) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1622.784588] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1622.784588] env[63241]: value = "resgroup-9" [ 1622.784588] env[63241]: _type = "ResourcePool" [ 1622.784588] env[63241]: }. 
{{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1622.785266] env[63241]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2b43b259-c361-4cf1-94a5-2c5401a7d604 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.810713] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lease: (returnval){ [ 1622.810713] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b5ec34-6a38-d972-0197-0db8f690bb72" [ 1622.810713] env[63241]: _type = "HttpNfcLease" [ 1622.810713] env[63241]: } obtained for vApp import into resource pool (val){ [ 1622.810713] env[63241]: value = "resgroup-9" [ 1622.810713] env[63241]: _type = "ResourcePool" [ 1622.810713] env[63241]: }. {{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1622.811128] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the lease: (returnval){ [ 1622.811128] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b5ec34-6a38-d972-0197-0db8f690bb72" [ 1622.811128] env[63241]: _type = "HttpNfcLease" [ 1622.811128] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1622.818426] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1622.818426] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b5ec34-6a38-d972-0197-0db8f690bb72" [ 1622.818426] env[63241]: _type = "HttpNfcLease" [ 1622.818426] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1622.894350] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820516, 'name': Rename_Task, 'duration_secs': 0.188897} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.894350] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1622.894350] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ab317e2-74c9-43ad-bdd7-98a2f8e9f652 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.900980] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1622.900980] env[63241]: value = "task-1820518" [ 1622.900980] env[63241]: _type = "Task" [ 1622.900980] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.908412] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.908543] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.908791] env[63241]: DEBUG nova.network.neutron [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1622.909854] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820518, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.166017] env[63241]: DEBUG nova.compute.manager [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1623.202685] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1623.202867] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1623.203035] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1623.203243] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1623.203466] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1623.203600] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1623.203810] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1623.203963] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1623.204709] env[63241]: DEBUG 
nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1623.204709] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1623.205068] env[63241]: DEBUG nova.virt.hardware [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1623.206103] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665a4981-6bdf-40a7-b77e-5c5ce30c45a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.222579] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae866c8-0d4e-45bb-99a5-55903d3ba842 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.321573] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1623.321573] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b5ec34-6a38-d972-0197-0db8f690bb72" [ 1623.321573] env[63241]: _type = "HttpNfcLease" [ 1623.321573] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1623.379498] env[63241]: DEBUG nova.network.neutron [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updated VIF entry in instance network info cache for port 6bc11935-f0d5-456c-b815-ea415689a621. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1623.379986] env[63241]: DEBUG nova.network.neutron [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updating instance_info_cache with network_info: [{"id": "6bc11935-f0d5-456c-b815-ea415689a621", "address": "fa:16:3e:de:e7:97", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc11935-f0", "ovs_interfaceid": "6bc11935-f0d5-456c-b815-ea415689a621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.412629] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820518, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.471530] env[63241]: DEBUG nova.network.neutron [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1623.582081] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1edf149e-50d7-41e9-bc4e-e252a5054f28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.590972] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b038ce4c-58d1-4da6-be15-ed6e1aad4707 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.635954] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d80e9e5-f11e-4c21-84b3-4bfe362eb3af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.644730] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79591a4c-6b28-46a9-8165-46d6fa54dcac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.663730] env[63241]: DEBUG nova.compute.provider_tree [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1623.680337] env[63241]: DEBUG nova.network.neutron [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Updating instance_info_cache with network_info: [{"id": "8754391e-9ab8-421b-995a-d10260d260c6", "address": "fa:16:3e:e1:e9:0a", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8754391e-9a", "ovs_interfaceid": "8754391e-9ab8-421b-995a-d10260d260c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.824183] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1623.824183] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b5ec34-6a38-d972-0197-0db8f690bb72" [ 1623.824183] env[63241]: _type = "HttpNfcLease" [ 1623.824183] env[63241]: } is ready. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1623.824849] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1623.824849] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b5ec34-6a38-d972-0197-0db8f690bb72" [ 1623.824849] env[63241]: _type = "HttpNfcLease" [ 1623.824849] env[63241]: }. {{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1623.825188] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93007ac-af60-43db-b1a5-dd0a0ac8a034 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.833834] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c99f05-263f-1fa7-0bcb-74b63ae276e5/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1623.833834] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c99f05-263f-1fa7-0bcb-74b63ae276e5/disk-0.vmdk. {{(pid=63241) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1623.909516] env[63241]: DEBUG oslo_concurrency.lockutils [req-f6f2d515-5bbe-4308-9ec0-163d993c4cac req-19631055-2a61-43c8-b959-2cdc45f0ede2 service nova] Releasing lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.922916] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-00b0d1bc-c1ce-4fdb-864f-4aa9919ad364 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.925029] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820518, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.169171] env[63241]: DEBUG nova.scheduler.client.report [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1624.183404] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.183756] env[63241]: DEBUG nova.compute.manager [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Instance network_info: |[{"id": "8754391e-9ab8-421b-995a-d10260d260c6", "address": "fa:16:3e:e1:e9:0a", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8754391e-9a", "ovs_interfaceid": "8754391e-9ab8-421b-995a-d10260d260c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1624.184484] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:e9:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dacd109c-2442-41b8-b612-7ed3efbdaa94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8754391e-9ab8-421b-995a-d10260d260c6', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1624.194270] env[63241]: DEBUG oslo.service.loopingcall [None req-56c98005-12d4-4c89-8b91-9802357d6a04 
tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1624.196651] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1624.196711] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-348732a2-da08-41b1-96e4-4869863a1fa7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.224378] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1624.224378] env[63241]: value = "task-1820519" [ 1624.224378] env[63241]: _type = "Task" [ 1624.224378] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.234604] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820519, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.301373] env[63241]: DEBUG nova.compute.manager [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Received event network-changed-8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1624.301527] env[63241]: DEBUG nova.compute.manager [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Refreshing instance network info cache due to event network-changed-8754391e-9ab8-421b-995a-d10260d260c6. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1624.301689] env[63241]: DEBUG oslo_concurrency.lockutils [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] Acquiring lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.301833] env[63241]: DEBUG oslo_concurrency.lockutils [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] Acquired lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.301999] env[63241]: DEBUG nova.network.neutron [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Refreshing network info cache for port 8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1624.426023] env[63241]: DEBUG oslo_vmware.api [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820518, 'name': PowerOnVM_Task, 'duration_secs': 1.196207} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.426023] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1624.426694] env[63241]: INFO nova.compute.manager [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Took 8.75 seconds to spawn the instance on the hypervisor. [ 1624.427217] env[63241]: DEBUG nova.compute.manager [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1624.428281] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cc49b6-4ec3-4cf3-a58b-8025e1de954f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.676064] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.538s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.676713] env[63241]: DEBUG nova.compute.manager [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1624.680639] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.654s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.680924] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.685052] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.024s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.685396] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.002s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.689015] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.416s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.694119] env[63241]: INFO nova.compute.claims [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1624.742851] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820519, 'name': CreateVM_Task, 'duration_secs': 0.42617} completed successfully. 
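[editor's note] The repeated 'Lock "compute_resources" acquired by ... waited Ns' / '"released" ... held Ns' lines above come from running resource-tracker critical sections under a named oslo.concurrency lock, which is why each caller's wait and hold time is reported. The sketch below assumes the documented `lockutils.synchronized` decorator; `claim_resources` is a hypothetical stand-in for the real instance_claim work.

```python
# Illustrative sketch, assuming oslo_concurrency.lockutils is available.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Runs with the "compute_resources" lock held; concurrent callers queue up,
    # producing the "waited N.NNNs" / "held N.NNNs" bookkeeping seen in the log.
    print(f"claiming resources for {instance_uuid}")

claim_resources("fb5d60fa-fa13-44a1-8291-4645761a0c80")
```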
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.744192] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1624.745320] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.745720] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.746227] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1624.746859] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22ab1da1-d646-4897-acb4-db0b4409accc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.753734] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1624.753734] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527d9865-3ece-bb1e-f018-98cb65db1ee9" [ 1624.753734] env[63241]: _type = "Task" [ 1624.753734] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.768619] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527d9865-3ece-bb1e-f018-98cb65db1ee9, 'name': SearchDatastore_Task, 'duration_secs': 0.010873} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.771690] env[63241]: INFO nova.scheduler.client.report [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted allocations for instance ac35fa03-aeca-4e18-84ab-cb80bb4cabfd [ 1624.773750] env[63241]: INFO nova.scheduler.client.report [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted allocations for instance 34d138e6-90b3-4243-bf45-96ae856cd631 [ 1624.775022] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.775504] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1624.775833] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.776169] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.776388] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1624.784689] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60729ec1-58bb-4857-8bff-a6de0dc8ab80 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.795523] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1624.796356] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1624.796495] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2173e876-3677-4c89-a99e-9dca45c7fef5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.803579] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1624.803579] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520806fa-b754-97c3-24f0-303e43892060" [ 1624.803579] env[63241]: _type = "Task" [ 1624.803579] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.821350] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520806fa-b754-97c3-24f0-303e43892060, 'name': SearchDatastore_Task, 'duration_secs': 0.010216} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.824227] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3a31570-a192-4b95-82fd-ddb662d8d0e7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.831064] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1624.831064] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527319ba-8e15-5dd6-2214-e36e425797ec" [ 1624.831064] env[63241]: _type = "Task" [ 1624.831064] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.832238] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Completed reading data from the image iterator. {{(pid=63241) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1624.832609] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c99f05-263f-1fa7-0bcb-74b63ae276e5/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1624.834120] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15dcb4b-3b7e-4921-98a1-d7591c01e238 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.853121] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c99f05-263f-1fa7-0bcb-74b63ae276e5/disk-0.vmdk is in state: ready. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1624.853331] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c99f05-263f-1fa7-0bcb-74b63ae276e5/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1624.853595] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527319ba-8e15-5dd6-2214-e36e425797ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009898} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.853788] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-78997e17-6e0a-4a46-8ca2-548d398d22e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.855519] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.856765] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 1626092d-78ef-41b5-8b47-fb840d63e4f4/1626092d-78ef-41b5-8b47-fb840d63e4f4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1624.856765] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e201b598-8bdd-4d0b-b117-07b294a13130 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.864572] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1624.864572] env[63241]: value = "task-1820520" [ 1624.864572] env[63241]: _type = "Task" [ 1624.864572] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.871652] env[63241]: DEBUG nova.compute.manager [req-9f57cfaa-890d-4bc9-b892-37b9152311d0 req-cd8c0ae1-4206-4d6f-81d2-e24fe909e663 service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Received event network-vif-plugged-871c0af7-a67d-4b89-b5aa-bc46ce52b6b5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1624.871652] env[63241]: DEBUG oslo_concurrency.lockutils [req-9f57cfaa-890d-4bc9-b892-37b9152311d0 req-cd8c0ae1-4206-4d6f-81d2-e24fe909e663 service nova] Acquiring lock "20c7a1a1-4396-414f-a52c-06551722b6eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.871652] env[63241]: DEBUG oslo_concurrency.lockutils [req-9f57cfaa-890d-4bc9-b892-37b9152311d0 req-cd8c0ae1-4206-4d6f-81d2-e24fe909e663 service nova] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.871652] env[63241]: DEBUG oslo_concurrency.lockutils [req-9f57cfaa-890d-4bc9-b892-37b9152311d0 req-cd8c0ae1-4206-4d6f-81d2-e24fe909e663 service nova] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.871952] env[63241]: DEBUG nova.compute.manager [req-9f57cfaa-890d-4bc9-b892-37b9152311d0 req-cd8c0ae1-4206-4d6f-81d2-e24fe909e663 service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] No waiting events found dispatching network-vif-plugged-871c0af7-a67d-4b89-b5aa-bc46ce52b6b5 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1624.871952] env[63241]: WARNING nova.compute.manager [req-9f57cfaa-890d-4bc9-b892-37b9152311d0 req-cd8c0ae1-4206-4d6f-81d2-e24fe909e663 service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Received unexpected event network-vif-plugged-871c0af7-a67d-4b89-b5aa-bc46ce52b6b5 for instance with vm_state building and task_state spawning. [ 1624.875704] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820520, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.960160] env[63241]: INFO nova.compute.manager [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Took 53.27 seconds to build instance. 
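The recurring "Waiting for the task: (returnval){ ... } to complete" / "Task: {...} completed successfully" pairs in the entries above (wait_for_task at oslo_vmware/api.py:397, _poll_task at api.py:434/444) come from oslo.vmware's task polling loop: the session repeatedly fetches the vCenter task's state, logs progress while it is still running, and returns once it reaches a terminal state. A minimal, hypothetical sketch of that pattern follows (generic Python, not the actual oslo.vmware implementation; poll_task_info, the state names, and the interval are placeholders for illustration only):

    import time

    def wait_for_task(poll_task_info, interval=0.5):
        # poll_task_info() is assumed to return (state, progress, result),
        # where state is one of 'queued', 'running', 'success', 'error'.
        while True:
            state, progress, result = poll_task_info()
            if state == 'success':
                return result                      # terminal: task finished
            if state == 'error':
                raise RuntimeError('task failed')  # terminal: surface the fault
            # non-terminal: report progress (cf. "progress is N%" above) and retry
            print('progress is %d%%' % progress)
            time.sleep(interval)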
[ 1625.028662] env[63241]: DEBUG nova.network.neutron [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Successfully updated port: 871c0af7-a67d-4b89-b5aa-bc46ce52b6b5 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1625.072745] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c99f05-263f-1fa7-0bcb-74b63ae276e5/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1625.072990] env[63241]: INFO nova.virt.vmwareapi.images [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Downloaded image file data 329bc4d0-0ed9-4ffe-a843-80beee7f7bfa [ 1625.074212] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55064ced-6c77-443c-aaee-6bf82e10e2df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.099711] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5c6656ac-26a9-4ada-9c66-4f9b07bb2998 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.177027] env[63241]: INFO nova.virt.vmwareapi.images [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] The imported VM was unregistered [ 1625.180603] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Caching image {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1625.181136] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating directory with path [datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1625.181490] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39375a80-8206-4801-82d1-18c84511d8c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.204534] env[63241]: DEBUG nova.compute.utils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1625.210799] env[63241]: DEBUG nova.compute.manager [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1625.211396] env[63241]: DEBUG nova.network.neutron [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1625.215745] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Created directory with path [datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1625.215958] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9/OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9.vmdk to [datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa.vmdk. {{(pid=63241) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1625.216680] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-5c818a87-2291-40a1-84ad-83f4430e8219 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.226303] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1625.226303] env[63241]: value = "task-1820522" [ 1625.226303] env[63241]: _type = "Task" [ 1625.226303] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.245831] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820522, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.294136] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5cca299-bb08-4098-91f2-265932cbe3d8 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "34d138e6-90b3-4243-bf45-96ae856cd631" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.705s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.295974] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d69209d0-8b1f-49ac-a8ef-60346d0cf6fd tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "ac35fa03-aeca-4e18-84ab-cb80bb4cabfd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.164s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.375811] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820520, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.377660] env[63241]: DEBUG nova.policy [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6b1519467304fc5bb33d508c55348be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '854490ce445a413d85901cfe6b091346', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1625.414045] env[63241]: DEBUG nova.network.neutron [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Updated VIF entry in instance network info cache for port 8754391e-9ab8-421b-995a-d10260d260c6. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1625.414045] env[63241]: DEBUG nova.network.neutron [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Updating instance_info_cache with network_info: [{"id": "8754391e-9ab8-421b-995a-d10260d260c6", "address": "fa:16:3e:e1:e9:0a", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8754391e-9a", "ovs_interfaceid": "8754391e-9ab8-421b-995a-d10260d260c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.463583] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2d98a404-6de8-439e-ab84-63c01bd79637 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "cfdc6b34-6940-414f-b17d-6fe17f92474a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.780s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.531444] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "refresh_cache-20c7a1a1-4396-414f-a52c-06551722b6eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.531645] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquired lock "refresh_cache-20c7a1a1-4396-414f-a52c-06551722b6eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.531882] env[63241]: DEBUG nova.network.neutron [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1625.711349] env[63241]: DEBUG nova.compute.manager [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1625.741409] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820522, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.823255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "0e5447fd-a04f-4bc2-b329-e015883773b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.823255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "0e5447fd-a04f-4bc2-b329-e015883773b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.823255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "0e5447fd-a04f-4bc2-b329-e015883773b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.823255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "0e5447fd-a04f-4bc2-b329-e015883773b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.823255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "0e5447fd-a04f-4bc2-b329-e015883773b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.825586] env[63241]: INFO nova.compute.manager [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Terminating instance [ 1625.828674] env[63241]: DEBUG nova.compute.manager [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1625.828674] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1625.829921] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9193fc0-5176-413a-8252-fb5b1ebfb5ce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.844226] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1625.844520] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c04608d7-b99b-4419-b43a-94455bf16a76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.853381] env[63241]: DEBUG oslo_vmware.api [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1625.853381] env[63241]: value = "task-1820523" [ 1625.853381] env[63241]: _type = "Task" [ 1625.853381] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.866640] env[63241]: DEBUG oslo_vmware.api [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.880454] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820520, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576881} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.880778] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 1626092d-78ef-41b5-8b47-fb840d63e4f4/1626092d-78ef-41b5-8b47-fb840d63e4f4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1625.880995] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1625.882084] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e08fe5b-2775-4f2e-ae2e-14636b34c7bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.892943] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1625.892943] env[63241]: value = "task-1820524" [ 1625.892943] env[63241]: _type = "Task" [ 1625.892943] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.905999] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820524, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.916548] env[63241]: DEBUG oslo_concurrency.lockutils [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] Releasing lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.916945] env[63241]: DEBUG nova.compute.manager [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-changed-10657b5b-6750-4389-b802-7e6bee8963e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1625.917179] env[63241]: DEBUG nova.compute.manager [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Refreshing instance network info cache due to event network-changed-10657b5b-6750-4389-b802-7e6bee8963e7. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1625.917621] env[63241]: DEBUG oslo_concurrency.lockutils [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] Acquiring lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.917621] env[63241]: DEBUG oslo_concurrency.lockutils [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] Acquired lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.917787] env[63241]: DEBUG nova.network.neutron [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Refreshing network info cache for port 10657b5b-6750-4389-b802-7e6bee8963e7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1626.085820] env[63241]: DEBUG nova.network.neutron [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Successfully created port: e025d87f-adf8-4be9-91fa-85161ae568cf {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1626.130441] env[63241]: DEBUG nova.network.neutron [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1626.151452] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c75399-913e-4e63-a286-a026832ff90e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.167637] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61094a09-ce5c-4189-8fd2-3164ffa1393f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.202215] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b965b75d-d184-490e-a7ef-a3c52c733d07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.213450] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26721be-18fc-4954-8f0c-0f3977312988 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.234975] env[63241]: DEBUG nova.compute.provider_tree [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1626.248627] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820522, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.364132] env[63241]: DEBUG oslo_vmware.api [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820523, 'name': PowerOffVM_Task, 'duration_secs': 0.2785} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.364418] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1626.364610] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1626.364915] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f9085b9-20e1-4551-ac8a-13175600e9e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.400360] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820524, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100245} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.402267] env[63241]: DEBUG nova.network.neutron [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Updating instance_info_cache with network_info: [{"id": "871c0af7-a67d-4b89-b5aa-bc46ce52b6b5", "address": "fa:16:3e:5f:97:7e", "network": {"id": "3d384599-a1b0-4ab0-91ff-44e793fe0605", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1391741783-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa6725d063734b7183d5a1ef4106ef60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d5d0e0d-cdec-474a-a891-a9ceff15a8b2", "external-id": "nsx-vlan-transportzone-456", "segmentation_id": 456, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap871c0af7-a6", "ovs_interfaceid": "871c0af7-a67d-4b89-b5aa-bc46ce52b6b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.402640] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1626.403505] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6bb94d87-ecbf-4214-b8a5-2cfd5aeb2445 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.436317] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 1626092d-78ef-41b5-8b47-fb840d63e4f4/1626092d-78ef-41b5-8b47-fb840d63e4f4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1626.437131] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56be4d7d-d656-4dd4-8377-7a1066804d61 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.464262] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1626.464262] env[63241]: value = "task-1820526" [ 1626.464262] env[63241]: _type = "Task" [ 1626.464262] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.469067] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1626.469323] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1626.469515] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleting the datastore file [datastore1] 0e5447fd-a04f-4bc2-b329-e015883773b8 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1626.470218] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f10d8d3d-8775-4e62-8dba-b253652d3a3e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.475381] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820526, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.480640] env[63241]: DEBUG oslo_vmware.api [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1626.480640] env[63241]: value = "task-1820527" [ 1626.480640] env[63241]: _type = "Task" [ 1626.480640] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.491415] env[63241]: DEBUG oslo_vmware.api [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.741031] env[63241]: DEBUG nova.scheduler.client.report [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1626.746862] env[63241]: DEBUG nova.compute.manager [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1626.760570] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820522, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1626.798024] env[63241]: DEBUG nova.virt.hardware [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1626.799457] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772c0a76-5858-4895-b3b4-667e4579a344 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.803795] env[63241]: DEBUG nova.network.neutron [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updated VIF entry in instance network info cache for port 10657b5b-6750-4389-b802-7e6bee8963e7. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1626.804202] env[63241]: DEBUG nova.network.neutron [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.813928] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efbeaf64-2fa2-409b-b4b1-b11fbd4d8e1a 
{{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.912014] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Releasing lock "refresh_cache-20c7a1a1-4396-414f-a52c-06551722b6eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.912014] env[63241]: DEBUG nova.compute.manager [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Instance network_info: |[{"id": "871c0af7-a67d-4b89-b5aa-bc46ce52b6b5", "address": "fa:16:3e:5f:97:7e", "network": {"id": "3d384599-a1b0-4ab0-91ff-44e793fe0605", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1391741783-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa6725d063734b7183d5a1ef4106ef60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d5d0e0d-cdec-474a-a891-a9ceff15a8b2", "external-id": "nsx-vlan-transportzone-456", "segmentation_id": 456, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap871c0af7-a6", "ovs_interfaceid": "871c0af7-a67d-4b89-b5aa-bc46ce52b6b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1626.912268] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:97:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d5d0e0d-cdec-474a-a891-a9ceff15a8b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '871c0af7-a67d-4b89-b5aa-bc46ce52b6b5', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1626.924756] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Creating folder: Project (aa6725d063734b7183d5a1ef4106ef60). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1626.925872] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-637da5f5-a866-4074-93f8-86abdf0ed537 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.938385] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Created folder: Project (aa6725d063734b7183d5a1ef4106ef60) in parent group-v376927. [ 1626.938645] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Creating folder: Instances. Parent ref: group-v377110. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1626.938850] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36c758cb-f312-4166-a8f3-b52eaa684cdc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.949709] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Created folder: Instances in parent group-v377110. [ 1626.949709] env[63241]: DEBUG oslo.service.loopingcall [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1626.949964] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1626.949964] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a780c3a7-1fc1-46b2-96b1-a3ba7eaf7f2b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.975749] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820526, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.976883] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1626.976883] env[63241]: value = "task-1820530" [ 1626.976883] env[63241]: _type = "Task" [ 1626.976883] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.991770] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820530, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.001038] env[63241]: DEBUG oslo_vmware.api [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820527, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.116334] env[63241]: DEBUG nova.compute.manager [req-5d8b47f7-1060-4c1c-a187-c66b1b1bd593 req-265ab03a-e419-4dfe-9383-0b893e9f58ef service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Received event network-changed-871c0af7-a67d-4b89-b5aa-bc46ce52b6b5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1627.116334] env[63241]: DEBUG nova.compute.manager [req-5d8b47f7-1060-4c1c-a187-c66b1b1bd593 req-265ab03a-e419-4dfe-9383-0b893e9f58ef service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Refreshing instance network info cache due to event network-changed-871c0af7-a67d-4b89-b5aa-bc46ce52b6b5. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1627.116334] env[63241]: DEBUG oslo_concurrency.lockutils [req-5d8b47f7-1060-4c1c-a187-c66b1b1bd593 req-265ab03a-e419-4dfe-9383-0b893e9f58ef service nova] Acquiring lock "refresh_cache-20c7a1a1-4396-414f-a52c-06551722b6eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.116752] env[63241]: DEBUG oslo_concurrency.lockutils [req-5d8b47f7-1060-4c1c-a187-c66b1b1bd593 req-265ab03a-e419-4dfe-9383-0b893e9f58ef service nova] Acquired lock "refresh_cache-20c7a1a1-4396-414f-a52c-06551722b6eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.117077] env[63241]: DEBUG nova.network.neutron [req-5d8b47f7-1060-4c1c-a187-c66b1b1bd593 req-265ab03a-e419-4dfe-9383-0b893e9f58ef service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Refreshing network info cache for port 871c0af7-a67d-4b89-b5aa-bc46ce52b6b5 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1627.251635] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.252252] env[63241]: DEBUG nova.compute.manager [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1627.258135] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.838s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.258135] env[63241]: DEBUG nova.objects.instance [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lazy-loading 'resources' on Instance uuid 9b61cee5-65b4-499e-80fd-c6ce6f79dd13 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1627.264467] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820522, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.312163] env[63241]: DEBUG oslo_concurrency.lockutils [req-e96fbfdd-2118-4997-ab7d-3507f0bb17e3 req-4b367afe-4c62-4c87-8ec9-62c2281ad5c3 service nova] Releasing lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.337463] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquiring lock "cfdc6b34-6940-414f-b17d-6fe17f92474a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.337463] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "cfdc6b34-6940-414f-b17d-6fe17f92474a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.337463] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquiring lock "cfdc6b34-6940-414f-b17d-6fe17f92474a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.337463] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "cfdc6b34-6940-414f-b17d-6fe17f92474a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.337463] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 
tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "cfdc6b34-6940-414f-b17d-6fe17f92474a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.340091] env[63241]: INFO nova.compute.manager [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Terminating instance [ 1627.344064] env[63241]: DEBUG nova.compute.manager [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1627.344064] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1627.344064] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c681531-59dd-42c7-8140-112800429af3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.355427] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1627.355862] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-827a7fc3-db49-4718-9392-50af0d27a8ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.366321] env[63241]: DEBUG oslo_vmware.api [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1627.366321] env[63241]: value = "task-1820531" [ 1627.366321] env[63241]: _type = "Task" [ 1627.366321] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.380998] env[63241]: DEBUG oslo_vmware.api [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820531, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.478937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquiring lock "effc3987-45d0-4305-83a2-0eba47d2c7fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.478937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "effc3987-45d0-4305-83a2-0eba47d2c7fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.485569] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820526, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.499692] env[63241]: DEBUG oslo_vmware.api [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.505716] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820530, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.761347] env[63241]: DEBUG nova.compute.utils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1627.768619] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820522, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.772405] env[63241]: DEBUG nova.compute.manager [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1627.772679] env[63241]: DEBUG nova.network.neutron [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1627.878907] env[63241]: DEBUG oslo_vmware.api [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820531, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.906700] env[63241]: DEBUG nova.policy [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6b1519467304fc5bb33d508c55348be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '854490ce445a413d85901cfe6b091346', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1627.977027] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820526, 'name': ReconfigVM_Task, 'duration_secs': 1.393059} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.977027] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 1626092d-78ef-41b5-8b47-fb840d63e4f4/1626092d-78ef-41b5-8b47-fb840d63e4f4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1627.978062] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b21bc71-0bc1-4234-a741-84f4fe6c6299 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.994556] env[63241]: DEBUG nova.network.neutron [req-5d8b47f7-1060-4c1c-a187-c66b1b1bd593 req-265ab03a-e419-4dfe-9383-0b893e9f58ef service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Updated VIF entry in instance network info cache for port 871c0af7-a67d-4b89-b5aa-bc46ce52b6b5. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1627.995018] env[63241]: DEBUG nova.network.neutron [req-5d8b47f7-1060-4c1c-a187-c66b1b1bd593 req-265ab03a-e419-4dfe-9383-0b893e9f58ef service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Updating instance_info_cache with network_info: [{"id": "871c0af7-a67d-4b89-b5aa-bc46ce52b6b5", "address": "fa:16:3e:5f:97:7e", "network": {"id": "3d384599-a1b0-4ab0-91ff-44e793fe0605", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1391741783-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa6725d063734b7183d5a1ef4106ef60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d5d0e0d-cdec-474a-a891-a9ceff15a8b2", "external-id": "nsx-vlan-transportzone-456", "segmentation_id": 456, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap871c0af7-a6", "ovs_interfaceid": "871c0af7-a67d-4b89-b5aa-bc46ce52b6b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.997007] env[63241]: DEBUG nova.compute.manager [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1628.000050] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1628.000050] env[63241]: value = "task-1820532" [ 1628.000050] env[63241]: _type = "Task" [ 1628.000050] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.015191] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820530, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.015191] env[63241]: DEBUG oslo_vmware.api [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.473538} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.016033] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1628.016489] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1628.016489] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1628.016639] env[63241]: INFO nova.compute.manager [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Took 2.19 seconds to destroy the instance on the hypervisor. [ 1628.016837] env[63241]: DEBUG oslo.service.loopingcall [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.017358] env[63241]: DEBUG nova.compute.manager [-] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1628.017795] env[63241]: DEBUG nova.network.neutron [-] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1628.023238] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820532, 'name': Rename_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.215156] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f56d385-8089-4b5a-b263-797140e63776 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.222902] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16930ab5-ecc6-4745-a3b7-92dd33934d38 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.256924] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870cdd79-8b71-454c-afec-dd21b3c390ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.267656] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3574a6a2-8752-4e1b-9c61-1cf7f50b98d8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.271489] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820522, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.755619} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.271761] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9/OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9.vmdk to [datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa.vmdk. [ 1628.272082] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Cleaning up location [datastore1] OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1628.272214] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_a4c834fc-7975-4643-ba2e-bdc502c97bf9 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1628.273712] env[63241]: DEBUG nova.compute.manager [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1628.276225] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8d3f6b6-c12b-4d91-ab90-ba1c1ddef026 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.289310] env[63241]: DEBUG nova.compute.provider_tree [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1628.295516] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1628.295516] env[63241]: value = "task-1820533" [ 1628.295516] env[63241]: _type = "Task" [ 1628.295516] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.304754] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820533, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.377329] env[63241]: DEBUG oslo_vmware.api [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820531, 'name': PowerOffVM_Task, 'duration_secs': 0.614232} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.380977] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1628.381194] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1628.382074] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7532e361-63fd-45fb-a725-2e6925bf95b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.441110] env[63241]: DEBUG nova.network.neutron [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Successfully updated port: e025d87f-adf8-4be9-91fa-85161ae568cf {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1628.495779] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820530, 'name': CreateVM_Task, 'duration_secs': 1.208599} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.496052] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1628.497032] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.497032] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.498153] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1628.498153] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fd25af0-651b-412e-8044-0a0addee557b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.501862] env[63241]: DEBUG oslo_concurrency.lockutils [req-5d8b47f7-1060-4c1c-a187-c66b1b1bd593 req-265ab03a-e419-4dfe-9383-0b893e9f58ef service nova] Releasing lock "refresh_cache-20c7a1a1-4396-414f-a52c-06551722b6eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.505417] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1628.505417] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529c2c57-a946-374e-edff-f78fffb8e77e" [ 1628.505417] env[63241]: _type = "Task" [ 1628.505417] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.520394] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820532, 'name': Rename_Task, 'duration_secs': 0.167438} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.524588] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1628.524796] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529c2c57-a946-374e-edff-f78fffb8e77e, 'name': SearchDatastore_Task, 'duration_secs': 0.010363} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.525104] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51691a2a-626b-47d5-b170-e7d4240b8429 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.527027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.527318] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1628.527625] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.527862] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.528881] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1628.529195] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.529967] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e72168c-7dbe-4443-ba79-2749f5a4fada {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.534821] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1628.534821] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1628.534821] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Deleting the datastore file [datastore1] cfdc6b34-6940-414f-b17d-6fe17f92474a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1628.534821] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b62cdb5d-7605-46cc-818b-1eaf43c13f6c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.540023] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1628.540023] env[63241]: value = "task-1820535" [ 1628.540023] env[63241]: _type = "Task" [ 1628.540023] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.543170] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1628.543576] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1628.544268] env[63241]: DEBUG oslo_vmware.api [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for the task: (returnval){ [ 1628.544268] env[63241]: value = "task-1820536" [ 1628.544268] env[63241]: _type = "Task" [ 1628.544268] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.544845] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dfa8c61-1777-4f89-844f-4905a606ee79 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.553009] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820535, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.556074] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1628.556074] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a5fb28-8106-35f1-57e3-b922fd43aae8" [ 1628.556074] env[63241]: _type = "Task" [ 1628.556074] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.559337] env[63241]: DEBUG oslo_vmware.api [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.569060] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a5fb28-8106-35f1-57e3-b922fd43aae8, 'name': SearchDatastore_Task, 'duration_secs': 0.009098} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.569907] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-897aa388-e01a-44cd-b4bf-c574fc5b9daf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.575381] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1628.575381] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e0a5b6-a127-4c89-abc3-933aca0da8b9" [ 1628.575381] env[63241]: _type = "Task" [ 1628.575381] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.584642] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e0a5b6-a127-4c89-abc3-933aca0da8b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.606927] env[63241]: DEBUG nova.network.neutron [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Successfully created port: f11430c6-423c-4742-b139-54246d031151 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1628.792214] env[63241]: DEBUG nova.scheduler.client.report [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1628.805159] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820533, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036944} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.805487] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1628.805634] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.805933] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa.vmdk to [datastore1] f583adda-976e-4f79-adc7-0b4e1a73ad73/f583adda-976e-4f79-adc7-0b4e1a73ad73.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1628.806236] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0e51c7c-822e-48d8-82d6-ed8fe2f4148e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.813402] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1628.813402] env[63241]: value = "task-1820537" [ 1628.813402] env[63241]: _type = "Task" [ 1628.813402] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.825317] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.940887] env[63241]: DEBUG nova.compute.manager [req-fc56965f-97cd-4ae7-80f3-a16c8bfeac8d req-324967ca-cd48-44b6-8162-a0f22c84c3db service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Received event network-vif-deleted-c55c077c-917c-45af-a4ea-c51ad8696209 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1628.941241] env[63241]: INFO nova.compute.manager [req-fc56965f-97cd-4ae7-80f3-a16c8bfeac8d req-324967ca-cd48-44b6-8162-a0f22c84c3db service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Neutron deleted interface c55c077c-917c-45af-a4ea-c51ad8696209; detaching it from the instance and deleting it from the info cache [ 1628.941526] env[63241]: DEBUG nova.network.neutron [req-fc56965f-97cd-4ae7-80f3-a16c8bfeac8d req-324967ca-cd48-44b6-8162-a0f22c84c3db service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.944179] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.944411] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.944639] env[63241]: DEBUG nova.network.neutron [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1628.975216] env[63241]: DEBUG nova.network.neutron [-] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.058427] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820535, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.064376] env[63241]: DEBUG oslo_vmware.api [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Task: {'id': task-1820536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14055} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.064899] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1629.065172] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1629.065428] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1629.065695] env[63241]: INFO nova.compute.manager [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Took 1.72 seconds to destroy the instance on the hypervisor. [ 1629.066028] env[63241]: DEBUG oslo.service.loopingcall [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1629.066284] env[63241]: DEBUG nova.compute.manager [-] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1629.066431] env[63241]: DEBUG nova.network.neutron [-] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1629.088447] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e0a5b6-a127-4c89-abc3-933aca0da8b9, 'name': SearchDatastore_Task, 'duration_secs': 0.009576} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.088790] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1629.089101] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 20c7a1a1-4396-414f-a52c-06551722b6eb/20c7a1a1-4396-414f-a52c-06551722b6eb.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1629.089420] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11d89d96-eca1-47eb-9dda-c9d71a9f0150 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.099777] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1629.099777] env[63241]: value = "task-1820538" [ 1629.099777] env[63241]: _type = "Task" [ 1629.099777] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.107063] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820538, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.276464] env[63241]: DEBUG nova.compute.manager [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Received event network-vif-plugged-e025d87f-adf8-4be9-91fa-85161ae568cf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1629.276685] env[63241]: DEBUG oslo_concurrency.lockutils [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] Acquiring lock "fb5d60fa-fa13-44a1-8291-4645761a0c80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.276902] env[63241]: DEBUG oslo_concurrency.lockutils [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.277088] env[63241]: DEBUG oslo_concurrency.lockutils [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.277479] env[63241]: DEBUG nova.compute.manager [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] No waiting events found dispatching network-vif-plugged-e025d87f-adf8-4be9-91fa-85161ae568cf {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1629.277479] env[63241]: WARNING nova.compute.manager [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Received unexpected event network-vif-plugged-e025d87f-adf8-4be9-91fa-85161ae568cf for instance with vm_state building and task_state spawning. [ 1629.277653] env[63241]: DEBUG nova.compute.manager [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Received event network-changed-e025d87f-adf8-4be9-91fa-85161ae568cf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1629.277824] env[63241]: DEBUG nova.compute.manager [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Refreshing instance network info cache due to event network-changed-e025d87f-adf8-4be9-91fa-85161ae568cf. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1629.277996] env[63241]: DEBUG oslo_concurrency.lockutils [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] Acquiring lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.286300] env[63241]: DEBUG nova.compute.manager [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1629.301735] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.046s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.305220] env[63241]: DEBUG oslo_concurrency.lockutils [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 27.608s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.323364] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1629.323628] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1629.323786] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1629.323968] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 
tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1629.324692] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1629.324870] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1629.325101] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1629.325272] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1629.325531] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1629.325717] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1629.325895] env[63241]: DEBUG nova.virt.hardware [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1629.326827] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d228f16-a436-4830-9881-d7168beb3bdc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.332896] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820537, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.338788] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77168e2f-8b0a-413a-9f54-c375b79cdb98 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.355365] env[63241]: INFO nova.scheduler.client.report [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Deleted allocations for instance 9b61cee5-65b4-499e-80fd-c6ce6f79dd13 [ 1629.444174] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "29b6caa8-a07c-494b-b776-b08affa45c87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.444458] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.450057] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ff0116a-e649-4ba2-8ff1-13dbd3b4f2ad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.460473] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0112ff27-116f-423e-aefb-d8b62656eb7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.478149] env[63241]: INFO nova.compute.manager [-] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Took 1.46 seconds to deallocate network for instance. [ 1629.495709] env[63241]: DEBUG nova.compute.manager [req-fc56965f-97cd-4ae7-80f3-a16c8bfeac8d req-324967ca-cd48-44b6-8162-a0f22c84c3db service nova] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Detach interface failed, port_id=c55c077c-917c-45af-a4ea-c51ad8696209, reason: Instance 0e5447fd-a04f-4bc2-b329-e015883773b8 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1629.497115] env[63241]: DEBUG nova.network.neutron [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1629.552545] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820535, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.607282] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820538, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.655175] env[63241]: DEBUG nova.network.neutron [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Updating instance_info_cache with network_info: [{"id": "e025d87f-adf8-4be9-91fa-85161ae568cf", "address": "fa:16:3e:e3:fb:11", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape025d87f-ad", "ovs_interfaceid": "e025d87f-adf8-4be9-91fa-85161ae568cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.823477] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820537, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.865692] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e6dd2159-fcba-4882-bef0-18b18f975719 tempest-InstanceActionsNegativeTestJSON-100267800 tempest-InstanceActionsNegativeTestJSON-100267800-project-member] Lock "9b61cee5-65b4-499e-80fd-c6ce6f79dd13" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.435s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.893278] env[63241]: DEBUG nova.network.neutron [-] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.953832] env[63241]: DEBUG nova.compute.manager [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1629.999552] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.059966] env[63241]: DEBUG oslo_vmware.api [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820535, 'name': PowerOnVM_Task, 'duration_secs': 1.36834} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.060498] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1630.061437] env[63241]: INFO nova.compute.manager [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Took 9.41 seconds to spawn the instance on the hypervisor. [ 1630.061658] env[63241]: DEBUG nova.compute.manager [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1630.062738] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1c40b0-437d-4600-9ad2-4f74eaaf9830 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.112313] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820538, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.148988] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8472034-3819-4cc9-9311-0733106da111 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.160356] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.160356] env[63241]: DEBUG nova.compute.manager [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Instance network_info: |[{"id": "e025d87f-adf8-4be9-91fa-85161ae568cf", "address": "fa:16:3e:e3:fb:11", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape025d87f-ad", "ovs_interfaceid": "e025d87f-adf8-4be9-91fa-85161ae568cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1630.160667] env[63241]: DEBUG oslo_concurrency.lockutils [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] Acquired lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.160826] env[63241]: DEBUG nova.network.neutron [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Refreshing network info cache for port e025d87f-adf8-4be9-91fa-85161ae568cf {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1630.162294] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:fb:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '095fbf26-7367-4f9e-87c5-2965b64b0b0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e025d87f-adf8-4be9-91fa-85161ae568cf', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1630.171203] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Creating folder: Project (854490ce445a413d85901cfe6b091346). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1630.172532] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6bc0e1-a746-4c5f-8485-892c97d4ff44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.180624] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5043b2be-7483-4855-a5e0-24a641941c4d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.216226] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac8e4b9-2e33-4f03-bf50-e06501cc3be8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.218992] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Created folder: Project (854490ce445a413d85901cfe6b091346) in parent group-v376927. [ 1630.219213] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Creating folder: Instances. Parent ref: group-v377113. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1630.219464] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c187e0b9-5e7a-41cb-a969-172611e1a2ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.229814] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dda6f72-656c-49af-8dd7-b0c5934b44c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.236408] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Created folder: Instances in parent group-v377113. [ 1630.236759] env[63241]: DEBUG oslo.service.loopingcall [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1630.237361] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1630.237602] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54c06ee7-6f4d-44cb-b073-44aa5b71b854 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.262493] env[63241]: DEBUG nova.compute.provider_tree [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1630.270140] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1630.270140] env[63241]: value = "task-1820541" [ 1630.270140] env[63241]: _type = "Task" [ 1630.270140] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.278577] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820541, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.324950] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820537, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.398214] env[63241]: INFO nova.compute.manager [-] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Took 1.33 seconds to deallocate network for instance. [ 1630.490020] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.590670] env[63241]: INFO nova.compute.manager [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Took 56.34 seconds to build instance. [ 1630.613406] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820538, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.678967] env[63241]: DEBUG nova.network.neutron [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Successfully updated port: f11430c6-423c-4742-b139-54246d031151 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1630.763602] env[63241]: DEBUG nova.network.neutron [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Updated VIF entry in instance network info cache for port e025d87f-adf8-4be9-91fa-85161ae568cf. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1630.764058] env[63241]: DEBUG nova.network.neutron [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Updating instance_info_cache with network_info: [{"id": "e025d87f-adf8-4be9-91fa-85161ae568cf", "address": "fa:16:3e:e3:fb:11", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape025d87f-ad", "ovs_interfaceid": "e025d87f-adf8-4be9-91fa-85161ae568cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.781960] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820541, 'name': CreateVM_Task, 'duration_secs': 0.409946} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.782845] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1630.783608] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.783805] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.784158] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1630.784727] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-149c7d38-da4b-4783-acf7-1d1ce9ac9c4b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.790524] env[63241]: ERROR nova.scheduler.client.report [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [req-afaaa6dd-07a6-464d-981c-058581937f42] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-afaaa6dd-07a6-464d-981c-058581937f42"}]} [ 1630.797517] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1630.797517] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524ac4eb-647b-4621-459b-d9656efc66bf" [ 1630.797517] env[63241]: _type = "Task" [ 1630.797517] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.806072] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524ac4eb-647b-4621-459b-d9656efc66bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.809432] env[63241]: DEBUG nova.scheduler.client.report [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1630.825633] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820537, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.833181] env[63241]: DEBUG nova.scheduler.client.report [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1630.833412] env[63241]: DEBUG nova.compute.provider_tree [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1630.849348] env[63241]: DEBUG nova.scheduler.client.report [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1630.870082] env[63241]: DEBUG nova.scheduler.client.report [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: 
COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1630.906278] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.001775] env[63241]: DEBUG nova.compute.manager [req-bdaa4487-06fc-440e-8825-3d968e40540a req-062b1c68-0c25-419f-97a5-d3d3789c5377 service nova] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Received event network-vif-deleted-8d4032b3-2e92-4f2b-8a3e-7ebd1da84c6d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1631.092862] env[63241]: DEBUG oslo_concurrency.lockutils [None req-56c98005-12d4-4c89-8b91-9802357d6a04 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.845s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.114772] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820538, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.185041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "refresh_cache-14af9f82-525e-453c-8dc5-ef5b13c67ee4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.185041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "refresh_cache-14af9f82-525e-453c-8dc5-ef5b13c67ee4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.185041] env[63241]: DEBUG nova.network.neutron [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1631.238082] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b224de4-b0db-4165-af1d-c64ad7c77dc3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.248673] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5689d682-d169-4127-9c52-e187cf1e8ac0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
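The inventory update above is rejected by Placement with HTTP 409 ("placement.concurrent_update") because the resource-provider generation sent by the report client had gone stale, after which the client refreshes inventories, aggregates and traits and retries; later entries show the provider generation moving from 96 to 97 once the retried update lands. A minimal sketch of that generation-checked retry loop follows. It only illustrates the pattern visible in these log lines: get_provider, put_inventory and ConflictError are hypothetical stand-ins, not the real Nova or Placement client API.

class ConflictError(Exception):
    """Raised when the provider generation sent with the update is stale."""

def get_provider(uuid):
    # Hypothetical: fetch the resource provider, returning its current
    # generation plus inventory. Stubbed here for illustration only.
    raise NotImplementedError

def put_inventory(uuid, generation, inventory):
    # Hypothetical: write the inventory together with the generation the
    # caller last saw; raises ConflictError on a 409 concurrent update.
    raise NotImplementedError

def set_inventory_with_retry(uuid, inventory, max_attempts=4):
    """Retry until the generation we send matches what Placement holds."""
    for attempt in range(max_attempts):
        provider = get_provider(uuid)          # re-read generation (e.g. 96)
        try:
            put_inventory(uuid, provider["generation"], inventory)
            return provider["generation"] + 1  # server bumps it (96 -> 97)
        except ConflictError:
            # Another writer updated the provider first; loop and re-read.
            continue
    raise RuntimeError("gave up after %d conflicting updates" % max_attempts)

The design choice shown in the log is optimistic concurrency: rather than holding a server-side lock across the update, the writer sends the generation it last observed and simply re-reads and retries when another writer got there first.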
[ 1631.292876] env[63241]: DEBUG oslo_concurrency.lockutils [req-45855cd6-a440-4c23-a70b-d671962560dc req-80039761-b07d-4156-9e69-0d64bf430c4c service nova] Releasing lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.294835] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3c86d0-7835-41b2-9202-462c90d730c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.308984] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a2a34b-6e4c-4744-bba5-6dbeff9d5de2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.323893] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524ac4eb-647b-4621-459b-d9656efc66bf, 'name': SearchDatastore_Task, 'duration_secs': 0.090831} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.328454] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.329030] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1631.329285] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.329535] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.329838] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1631.339012] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-a98be371-3568-4a97-a2fe-3e94b2926c48 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.341611] env[63241]: DEBUG nova.compute.provider_tree [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1631.348747] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820537, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.505295} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.349032] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa/329bc4d0-0ed9-4ffe-a843-80beee7f7bfa.vmdk to [datastore1] f583adda-976e-4f79-adc7-0b4e1a73ad73/f583adda-976e-4f79-adc7-0b4e1a73ad73.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1631.349946] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a869db0a-987d-48a5-9ccb-9bb423634142 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.354908] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1631.355135] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1631.356608] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-150da6db-3f5f-48a1-ab89-be1c478c1224 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.378423] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] f583adda-976e-4f79-adc7-0b4e1a73ad73/f583adda-976e-4f79-adc7-0b4e1a73ad73.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1631.380617] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dae0fb2-503b-4116-8b81-94d5d2105e77 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.396788] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1631.396788] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521ff4a7-0b7f-0e21-4aa0-a9d688c8d891" [ 1631.396788] env[63241]: _type = "Task" [ 1631.396788] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.402617] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1631.402617] env[63241]: value = "task-1820542" [ 1631.402617] env[63241]: _type = "Task" [ 1631.402617] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.406518] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521ff4a7-0b7f-0e21-4aa0-a9d688c8d891, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.417122] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820542, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.442241] env[63241]: DEBUG nova.compute.manager [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Received event network-vif-plugged-f11430c6-423c-4742-b139-54246d031151 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1631.442905] env[63241]: DEBUG oslo_concurrency.lockutils [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] Acquiring lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.442905] env[63241]: DEBUG oslo_concurrency.lockutils [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.443195] env[63241]: DEBUG oslo_concurrency.lockutils [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.443195] env[63241]: DEBUG nova.compute.manager [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] No waiting events found dispatching network-vif-plugged-f11430c6-423c-4742-b139-54246d031151 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1631.443374] env[63241]: WARNING nova.compute.manager [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Received unexpected event network-vif-plugged-f11430c6-423c-4742-b139-54246d031151 for instance with vm_state building and task_state spawning. [ 1631.443552] env[63241]: DEBUG nova.compute.manager [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Received event network-changed-f11430c6-423c-4742-b139-54246d031151 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1631.443750] env[63241]: DEBUG nova.compute.manager [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Refreshing instance network info cache due to event network-changed-f11430c6-423c-4742-b139-54246d031151. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1631.443859] env[63241]: DEBUG oslo_concurrency.lockutils [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] Acquiring lock "refresh_cache-14af9f82-525e-453c-8dc5-ef5b13c67ee4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.611829] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820538, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.375968} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.611829] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 20c7a1a1-4396-414f-a52c-06551722b6eb/20c7a1a1-4396-414f-a52c-06551722b6eb.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1631.611829] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1631.612277] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb0c6ead-b179-4322-9670-45795d56e6af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.619032] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1631.619032] env[63241]: value = "task-1820543" [ 1631.619032] env[63241]: _type = "Task" [ 1631.619032] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.627341] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820543, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.683879] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "1626092d-78ef-41b5-8b47-fb840d63e4f4" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.684216] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.684414] env[63241]: INFO nova.compute.manager [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Shelving [ 1631.729042] env[63241]: DEBUG nova.network.neutron [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1631.893467] env[63241]: DEBUG nova.scheduler.client.report [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 96 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1631.894264] env[63241]: DEBUG nova.compute.provider_tree [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 96 to 97 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1631.894584] env[63241]: DEBUG nova.compute.provider_tree [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1631.911892] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521ff4a7-0b7f-0e21-4aa0-a9d688c8d891, 'name': SearchDatastore_Task, 'duration_secs': 0.055034} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.917539] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3acb2454-21fc-4317-880d-5f4a9c57f560 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.926422] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820542, 'name': ReconfigVM_Task, 'duration_secs': 0.321973} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.928849] env[63241]: DEBUG nova.network.neutron [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Updating instance_info_cache with network_info: [{"id": "f11430c6-423c-4742-b139-54246d031151", "address": "fa:16:3e:5f:48:28", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf11430c6-42", "ovs_interfaceid": "f11430c6-423c-4742-b139-54246d031151", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.930455] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Reconfigured VM instance instance-00000040 to attach disk [datastore1] f583adda-976e-4f79-adc7-0b4e1a73ad73/f583adda-976e-4f79-adc7-0b4e1a73ad73.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1631.933806] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 
1631.933806] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bfcd46-d076-ffc5-928d-bfb7499c34dd" [ 1631.933806] env[63241]: _type = "Task" [ 1631.933806] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.933806] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4a7d90d-50e6-4ee3-a90b-31a8c13d561f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.942216] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bfcd46-d076-ffc5-928d-bfb7499c34dd, 'name': SearchDatastore_Task, 'duration_secs': 0.00986} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.943603] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.944567] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] fb5d60fa-fa13-44a1-8291-4645761a0c80/fb5d60fa-fa13-44a1-8291-4645761a0c80.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1631.944810] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1631.944810] env[63241]: value = "task-1820544" [ 1631.944810] env[63241]: _type = "Task" [ 1631.944810] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.945043] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8031b965-772d-4ed2-913d-476b00bf1afc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.957427] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820544, 'name': Rename_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.958468] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1631.958468] env[63241]: value = "task-1820545" [ 1631.958468] env[63241]: _type = "Task" [ 1631.958468] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.968843] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820545, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.130721] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06541} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.131493] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1632.132465] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61553c78-bccc-4833-9fdd-1c3068a95658 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.156637] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 20c7a1a1-4396-414f-a52c-06551722b6eb/20c7a1a1-4396-414f-a52c-06551722b6eb.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1632.157712] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fa5e337-5c6e-4a08-9e68-db550f1d1e04 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.178670] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1632.178670] env[63241]: value = "task-1820546" [ 1632.178670] env[63241]: _type = "Task" [ 1632.178670] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.191793] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820546, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.195076] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1632.195372] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13d2e13d-2453-4e59-bb17-683d3c4079e5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.201975] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1632.201975] env[63241]: value = "task-1820547" [ 1632.201975] env[63241]: _type = "Task" [ 1632.201975] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.211107] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.435492] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "refresh_cache-14af9f82-525e-453c-8dc5-ef5b13c67ee4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.435492] env[63241]: DEBUG nova.compute.manager [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Instance network_info: |[{"id": "f11430c6-423c-4742-b139-54246d031151", "address": "fa:16:3e:5f:48:28", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf11430c6-42", "ovs_interfaceid": "f11430c6-423c-4742-b139-54246d031151", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1632.436040] env[63241]: DEBUG 
oslo_concurrency.lockutils [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] Acquired lock "refresh_cache-14af9f82-525e-453c-8dc5-ef5b13c67ee4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.436040] env[63241]: DEBUG nova.network.neutron [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Refreshing network info cache for port f11430c6-423c-4742-b139-54246d031151 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1632.437157] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:48:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '095fbf26-7367-4f9e-87c5-2965b64b0b0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f11430c6-423c-4742-b139-54246d031151', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1632.446222] env[63241]: DEBUG oslo.service.loopingcall [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1632.449786] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1632.450701] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0087931b-a7bd-4c46-b9bf-aca89b358433 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.480164] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820544, 'name': Rename_Task, 'duration_secs': 0.144024} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.484642] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1632.485033] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820545, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499829} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.485527] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1632.485527] env[63241]: value = "task-1820548" [ 1632.485527] env[63241]: _type = "Task" [ 1632.485527] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.485720] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-986eebce-8820-4067-990c-cd03ca66fc0e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.487438] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] fb5d60fa-fa13-44a1-8291-4645761a0c80/fb5d60fa-fa13-44a1-8291-4645761a0c80.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1632.487663] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1632.487929] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1fd92932-bb59-45a6-b880-bf730179fa49 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.498562] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820548, 'name': CreateVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.500740] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1632.500740] env[63241]: value = "task-1820549" [ 1632.500740] env[63241]: _type = "Task" [ 1632.500740] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.501028] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1632.501028] env[63241]: value = "task-1820550" [ 1632.501028] env[63241]: _type = "Task" [ 1632.501028] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.513088] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820550, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.518617] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820549, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.691149] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820546, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.714075] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820547, 'name': PowerOffVM_Task, 'duration_secs': 0.288927} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.714385] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1632.715256] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987882aa-f315-4ad8-b2d9-4f9bed5f1477 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.736405] env[63241]: DEBUG nova.network.neutron [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Updated VIF entry in instance network info cache for port f11430c6-423c-4742-b139-54246d031151. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1632.736799] env[63241]: DEBUG nova.network.neutron [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Updating instance_info_cache with network_info: [{"id": "f11430c6-423c-4742-b139-54246d031151", "address": "fa:16:3e:5f:48:28", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf11430c6-42", "ovs_interfaceid": "f11430c6-423c-4742-b139-54246d031151", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.739615] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6284af-04e3-4956-9f39-fc6e50c45b3f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.911610] env[63241]: DEBUG oslo_concurrency.lockutils [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.607s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.915013] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.016s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.917271] env[63241]: INFO nova.compute.claims [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1632.998792] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820548, 'name': CreateVM_Task, 'duration_secs': 0.47885} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.999037] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1632.999895] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.000121] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.000495] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1633.000792] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cef371ff-368c-4b47-a309-0722951025f6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.012071] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1633.012071] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5232d5aa-aa9f-fcc3-16a7-a341f40b1ace" [ 1633.012071] env[63241]: _type = "Task" [ 1633.012071] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.018078] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820550, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.251716} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.018304] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820549, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.021272] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1633.022141] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416863d9-a592-4290-b9cd-aa084e5fc81a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.031206] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5232d5aa-aa9f-fcc3-16a7-a341f40b1ace, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.049182] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] fb5d60fa-fa13-44a1-8291-4645761a0c80/fb5d60fa-fa13-44a1-8291-4645761a0c80.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1633.049798] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd9061e9-b112-4364-a981-ce5915e7c0cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.069258] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1633.069258] env[63241]: value = "task-1820551" [ 1633.069258] env[63241]: _type = "Task" [ 1633.069258] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.077629] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820551, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.190457] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820546, 'name': ReconfigVM_Task, 'duration_secs': 0.618232} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.190640] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 20c7a1a1-4396-414f-a52c-06551722b6eb/20c7a1a1-4396-414f-a52c-06551722b6eb.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1633.191311] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ba541f5-48ed-4047-917f-84ea0164dc8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.197191] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1633.197191] env[63241]: value = "task-1820552" [ 1633.197191] env[63241]: _type = "Task" [ 1633.197191] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.205942] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820552, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.239847] env[63241]: DEBUG oslo_concurrency.lockutils [req-7bd00e1f-494a-4f81-913b-e1bf55c0269a req-739704fd-714e-44da-9877-ea09b7bb142a service nova] Releasing lock "refresh_cache-14af9f82-525e-453c-8dc5-ef5b13c67ee4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.250867] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1633.251262] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9acb34c7-7e51-48ca-a567-d774122e17d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.258977] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1633.258977] env[63241]: value = "task-1820553" [ 1633.258977] env[63241]: _type = "Task" [ 1633.258977] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.268311] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820553, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.476591] env[63241]: INFO nova.scheduler.client.report [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Deleted allocation for migration dbead17e-aa42-4c50-ae83-6d0d9b03d450 [ 1633.512631] env[63241]: DEBUG oslo_vmware.api [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820549, 'name': PowerOnVM_Task, 'duration_secs': 0.620981} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.512901] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1633.513120] env[63241]: INFO nova.compute.manager [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Took 15.41 seconds to spawn the instance on the hypervisor. [ 1633.513309] env[63241]: DEBUG nova.compute.manager [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1633.514179] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175a85a7-328c-47c2-bb70-8667047379e5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.530710] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5232d5aa-aa9f-fcc3-16a7-a341f40b1ace, 'name': SearchDatastore_Task, 'duration_secs': 0.017914} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.531709] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.531953] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1633.532684] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.532684] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.532684] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1633.533065] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08b1da17-be2c-46e1-abea-72a1b14e9f73 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.542624] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1633.544216] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1633.544306] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fb61467-e9dd-4eaa-b90c-904adda89427 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.549725] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1633.549725] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525b836c-d990-c750-dbc2-113e5a34578d" [ 1633.549725] env[63241]: _type = "Task" [ 1633.549725] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.558350] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525b836c-d990-c750-dbc2-113e5a34578d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.581030] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820551, 'name': ReconfigVM_Task, 'duration_secs': 0.290371} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.581030] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Reconfigured VM instance instance-00000043 to attach disk [datastore1] fb5d60fa-fa13-44a1-8291-4645761a0c80/fb5d60fa-fa13-44a1-8291-4645761a0c80.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1633.581030] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32277276-d24a-4ff5-bddd-ce57002d9053 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.587673] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1633.587673] env[63241]: value = "task-1820554" [ 1633.587673] env[63241]: _type = "Task" [ 1633.587673] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.598154] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820554, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.709830] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820552, 'name': Rename_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.773022] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820553, 'name': CreateSnapshot_Task, 'duration_secs': 0.482366} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.773022] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1633.773022] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51bbaf5-9b60-4a14-80ae-b3c5f5dfce41 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.982183] env[63241]: DEBUG oslo_concurrency.lockutils [None req-107d8682-5867-4c90-aaab-48f9c6a9fbda tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 35.229s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.035946] env[63241]: INFO nova.compute.manager [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Took 62.31 seconds to build instance. [ 1634.062395] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525b836c-d990-c750-dbc2-113e5a34578d, 'name': SearchDatastore_Task, 'duration_secs': 0.023626} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.064044] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d835c78a-2f98-4cbb-888d-ca3b060c8d87 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.073211] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1634.073211] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529fcb45-3194-17ef-490e-2a4cb5f8f0eb" [ 1634.073211] env[63241]: _type = "Task" [ 1634.073211] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.085817] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529fcb45-3194-17ef-490e-2a4cb5f8f0eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.099093] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820554, 'name': Rename_Task, 'duration_secs': 0.141036} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.099376] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1634.099622] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-673d113c-762b-49d5-80eb-6c46b3ab632c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.109199] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1634.109199] env[63241]: value = "task-1820555" [ 1634.109199] env[63241]: _type = "Task" [ 1634.109199] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.117950] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820555, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.215018] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820552, 'name': Rename_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.292431] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1634.293574] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b3e02c-3c01-413e-8126-550ef2480d9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.296662] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-63e55cbe-ca9f-4224-a764-8d2d3d555b8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.307838] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adc1d28-5ce5-4e4f-98b8-409dfd3ecb80 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.311513] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1634.311513] env[63241]: value = "task-1820556" [ 1634.311513] env[63241]: _type = "Task" [ 1634.311513] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.346956] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2791bf9b-6bd1-479b-ad11-934fc2162bcc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.348906] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820556, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.354592] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bafa660-5580-4fa4-b038-38c77dbf669d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.368966] env[63241]: DEBUG nova.compute.provider_tree [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1634.538789] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b661eba3-d17f-4309-850d-7bad9214c3ca tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "f583adda-976e-4f79-adc7-0b4e1a73ad73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.821s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.574910] env[63241]: DEBUG oslo_concurrency.lockutils [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "f583adda-976e-4f79-adc7-0b4e1a73ad73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.574910] env[63241]: DEBUG oslo_concurrency.lockutils [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "f583adda-976e-4f79-adc7-0b4e1a73ad73" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.575139] env[63241]: DEBUG oslo_concurrency.lockutils [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "f583adda-976e-4f79-adc7-0b4e1a73ad73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.575320] env[63241]: DEBUG oslo_concurrency.lockutils [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "f583adda-976e-4f79-adc7-0b4e1a73ad73-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.575558] env[63241]: DEBUG oslo_concurrency.lockutils [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 
tempest-ImagesTestJSON-2007080518-project-member] Lock "f583adda-976e-4f79-adc7-0b4e1a73ad73-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.582170] env[63241]: INFO nova.compute.manager [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Terminating instance [ 1634.586051] env[63241]: DEBUG nova.compute.manager [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1634.586051] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1634.588187] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1ae90b-d1eb-4013-aee0-a0ae1b489906 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.593044] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529fcb45-3194-17ef-490e-2a4cb5f8f0eb, 'name': SearchDatastore_Task, 'duration_secs': 0.019807} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.593773] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.594117] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 14af9f82-525e-453c-8dc5-ef5b13c67ee4/14af9f82-525e-453c-8dc5-ef5b13c67ee4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1634.594377] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0035c9a-d585-4c06-9e2d-17cd36d00855 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.598842] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1634.599422] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a137bf22-f7b7-4fb1-bc91-47c94dde22c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.603458] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1634.603458] env[63241]: value = "task-1820557" [ 1634.603458] env[63241]: _type = "Task" [ 1634.603458] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.608228] env[63241]: DEBUG oslo_vmware.api [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1634.608228] env[63241]: value = "task-1820558" [ 1634.608228] env[63241]: _type = "Task" [ 1634.608228] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.617333] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820557, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.624729] env[63241]: DEBUG oslo_vmware.api [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820558, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.630062] env[63241]: DEBUG oslo_vmware.api [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820555, 'name': PowerOnVM_Task, 'duration_secs': 0.489672} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.630062] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1634.630062] env[63241]: INFO nova.compute.manager [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Took 7.88 seconds to spawn the instance on the hypervisor. [ 1634.630062] env[63241]: DEBUG nova.compute.manager [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1634.630062] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0839e545-4b3b-454d-8359-042c14316d2f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.711680] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820552, 'name': Rename_Task, 'duration_secs': 1.242183} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.712526] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1634.712526] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9bd746a3-83fa-431a-8494-81483a64f5c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.718758] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1634.718758] env[63241]: value = "task-1820559" [ 1634.718758] env[63241]: _type = "Task" [ 1634.718758] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.728970] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820559, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.824386] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820556, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.898234] env[63241]: ERROR nova.scheduler.client.report [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [req-44b88598-34aa-4f19-baa0-27bdf94296a2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-44b88598-34aa-4f19-baa0-27bdf94296a2"}]} [ 1634.915992] env[63241]: DEBUG nova.scheduler.client.report [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1634.935980] env[63241]: DEBUG nova.scheduler.client.report [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1634.936256] env[63241]: DEBUG nova.compute.provider_tree [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1634.948964] env[63241]: DEBUG nova.scheduler.client.report [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1634.977563] env[63241]: DEBUG nova.scheduler.client.report [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1635.123316] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820557, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.127422] env[63241]: DEBUG oslo_vmware.api [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820558, 'name': PowerOffVM_Task, 'duration_secs': 0.175256} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.129866] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1635.129866] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1635.129866] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca5abffa-dc5d-433c-9c62-6d971781a96a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.155948] env[63241]: INFO nova.compute.manager [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Took 48.68 seconds to build instance. [ 1635.231911] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820559, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.326467] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820556, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.365031] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bf1bf0-4beb-4409-97a3-2d9c56b81e4b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.377018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5c91dd-b783-41c2-87cf-9924a50260e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.407081] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af136ce2-aa87-4516-a7ec-f5b9381b6a65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.415814] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e279c24e-523f-48ed-9a39-044c1bf38824 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.430483] env[63241]: DEBUG nova.compute.provider_tree [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1635.436224] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1635.436451] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1635.436636] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleting the datastore file [datastore1] f583adda-976e-4f79-adc7-0b4e1a73ad73 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1635.437200] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0186f97f-74e8-44f0-904c-2656c9be2412 {{(pid=63241) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.448136] env[63241]: DEBUG oslo_vmware.api [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1635.448136] env[63241]: value = "task-1820561" [ 1635.448136] env[63241]: _type = "Task" [ 1635.448136] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.457808] env[63241]: DEBUG oslo_vmware.api [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820561, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.616643] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820557, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570975} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.616901] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 14af9f82-525e-453c-8dc5-ef5b13c67ee4/14af9f82-525e-453c-8dc5-ef5b13c67ee4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1635.617124] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1635.617741] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20694385-3d15-405b-8a3d-3bc684fdc625 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.624992] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1635.624992] env[63241]: value = "task-1820562" [ 1635.624992] env[63241]: _type = "Task" [ 1635.624992] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.633804] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820562, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.659537] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a3817192-fd48-4573-86d8-39dbe48e0b18 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.197s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.734021] env[63241]: DEBUG oslo_vmware.api [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820559, 'name': PowerOnVM_Task, 'duration_secs': 0.789941} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.734021] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1635.734021] env[63241]: INFO nova.compute.manager [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Took 12.56 seconds to spawn the instance on the hypervisor. [ 1635.734021] env[63241]: DEBUG nova.compute.manager [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1635.734021] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97320cb1-bb9b-4e7f-b0aa-d2a10d65f382 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.825915] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820556, 'name': CloneVM_Task} progress is 95%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.957806] env[63241]: DEBUG oslo_vmware.api [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.41322} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.958072] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1635.958258] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1635.958440] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1635.958613] env[63241]: INFO nova.compute.manager [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Took 1.37 seconds to destroy the instance on the hypervisor. [ 1635.958854] env[63241]: DEBUG oslo.service.loopingcall [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1635.959053] env[63241]: DEBUG nova.compute.manager [-] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1635.959153] env[63241]: DEBUG nova.network.neutron [-] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1635.983129] env[63241]: DEBUG nova.scheduler.client.report [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 98 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1635.983129] env[63241]: DEBUG nova.compute.provider_tree [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 98 to 99 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1635.983129] env[63241]: DEBUG nova.compute.provider_tree [None 
req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1636.139080] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820562, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.217111} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.139194] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1636.139953] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ce1f50-c7c4-4859-8086-f2a5e74cd8d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.164149] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 14af9f82-525e-453c-8dc5-ef5b13c67ee4/14af9f82-525e-453c-8dc5-ef5b13c67ee4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1636.164462] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80f7bb11-d9ee-4d33-bbfd-1701c7ebb28f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.185070] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1636.185070] env[63241]: value = "task-1820563" [ 1636.185070] env[63241]: _type = "Task" [ 1636.185070] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.197113] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820563, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.258876] env[63241]: INFO nova.compute.manager [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Took 51.54 seconds to build instance. [ 1636.332966] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820556, 'name': CloneVM_Task, 'duration_secs': 1.67561} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.333750] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Created linked-clone VM from snapshot [ 1636.334767] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200058ca-1d1c-4187-852c-36fcc530fa28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.344363] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Uploading image 80f341e6-b28c-4c17-98c5-f9d084a69516 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1636.375049] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1636.375049] env[63241]: value = "vm-377118" [ 1636.375049] env[63241]: _type = "VirtualMachine" [ 1636.375049] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1636.375049] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6b91ac1a-f708-4520-bc99-bc0ee5d91662 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.385306] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lease: (returnval){ [ 1636.385306] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524de589-76f6-b4e5-00bf-b0133c797cc5" [ 1636.385306] env[63241]: _type = "HttpNfcLease" [ 1636.385306] env[63241]: } obtained for exporting VM: (result){ [ 1636.385306] env[63241]: value = "vm-377118" [ 1636.385306] env[63241]: _type = "VirtualMachine" [ 1636.385306] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1636.385306] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the lease: (returnval){ [ 1636.385306] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524de589-76f6-b4e5-00bf-b0133c797cc5" [ 1636.385306] env[63241]: _type = "HttpNfcLease" [ 1636.385306] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1636.392090] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1636.392090] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524de589-76f6-b4e5-00bf-b0133c797cc5" [ 1636.392090] env[63241]: _type = "HttpNfcLease" [ 1636.392090] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1636.406326] env[63241]: DEBUG nova.compute.manager [req-08d1999d-14d5-4074-b18d-39972052ac69 req-98fb1ae4-6067-4346-bd16-5c95e9bca5a0 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Received event network-vif-deleted-89afe680-269b-4e65-8447-6e663afc4dca {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1636.406326] env[63241]: INFO nova.compute.manager [req-08d1999d-14d5-4074-b18d-39972052ac69 req-98fb1ae4-6067-4346-bd16-5c95e9bca5a0 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Neutron deleted interface 89afe680-269b-4e65-8447-6e663afc4dca; detaching it from the instance and deleting it from the info cache [ 1636.406326] env[63241]: DEBUG nova.network.neutron [req-08d1999d-14d5-4074-b18d-39972052ac69 req-98fb1ae4-6067-4346-bd16-5c95e9bca5a0 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.494127] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.577s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.494127] env[63241]: DEBUG nova.compute.manager [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1636.495448] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.481s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.495917] env[63241]: DEBUG nova.objects.instance [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lazy-loading 'resources' on Instance uuid 9e6ca606-383d-42f0-aea4-edecde33c1a4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1636.634156] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "7f1710d0-857d-41fc-8151-8c5e129dda08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.634632] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.695241] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820563, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.761414] env[63241]: DEBUG oslo_concurrency.lockutils [None req-13b839ef-ce16-43de-bbc8-856962885e1b tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.051s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.828701] env[63241]: DEBUG nova.network.neutron [-] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.894280] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1636.894280] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524de589-76f6-b4e5-00bf-b0133c797cc5" [ 1636.894280] env[63241]: _type = "HttpNfcLease" [ 1636.894280] env[63241]: } is ready. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1636.895098] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1636.895098] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524de589-76f6-b4e5-00bf-b0133c797cc5" [ 1636.895098] env[63241]: _type = "HttpNfcLease" [ 1636.895098] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1636.896334] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13ecaf7-1586-439a-8c50-005e5b40c95b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.906408] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235e0f1-ad73-4c09-32cd-a7cdc57d64c8/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1636.906485] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235e0f1-ad73-4c09-32cd-a7cdc57d64c8/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1636.908222] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53b21a47-d39f-42d8-8579-81c2482fa83a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.969316] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69f5c87-926a-4570-a4a5-cb739946a91b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.981393] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "20c7a1a1-4396-414f-a52c-06551722b6eb" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.981638] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.981860] env[63241]: INFO nova.compute.manager [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Rebooting instance [ 
1637.008462] env[63241]: DEBUG nova.compute.utils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1637.012340] env[63241]: DEBUG nova.compute.manager [req-08d1999d-14d5-4074-b18d-39972052ac69 req-98fb1ae4-6067-4346-bd16-5c95e9bca5a0 service nova] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Detach interface failed, port_id=89afe680-269b-4e65-8447-6e663afc4dca, reason: Instance f583adda-976e-4f79-adc7-0b4e1a73ad73 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1637.016623] env[63241]: DEBUG nova.compute.manager [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1637.016623] env[63241]: DEBUG nova.network.neutron [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1637.018199] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c5778669-6ff6-46ad-be7d-e9a33889debe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.086818] env[63241]: DEBUG nova.policy [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa5224c96c3545269f4f45be620a7cdf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98582d7ee18145318ee5a05cac36781e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1637.137697] env[63241]: DEBUG nova.compute.manager [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1637.203235] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820563, 'name': ReconfigVM_Task, 'duration_secs': 0.881138} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.203587] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 14af9f82-525e-453c-8dc5-ef5b13c67ee4/14af9f82-525e-453c-8dc5-ef5b13c67ee4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1637.204321] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e77fb38-d2ed-4647-99a4-b522fb00a42b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.212352] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1637.212352] env[63241]: value = "task-1820565" [ 1637.212352] env[63241]: _type = "Task" [ 1637.212352] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.222346] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820565, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.338931] env[63241]: INFO nova.compute.manager [-] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Took 1.38 seconds to deallocate network for instance. 
[ 1637.414830] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2053e84-4cc0-4650-b7e7-a94af3254590 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.428482] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677edce5-4acd-4332-9994-3c40c8cb3683 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.464192] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b079c85e-46df-4729-bcf2-69dd7c9d0af6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.472611] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2a9362-8288-43ec-be1b-7de4579ef5a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.491130] env[63241]: DEBUG nova.compute.provider_tree [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1637.508529] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "refresh_cache-20c7a1a1-4396-414f-a52c-06551722b6eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1637.508647] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquired lock "refresh_cache-20c7a1a1-4396-414f-a52c-06551722b6eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1637.508787] env[63241]: DEBUG nova.network.neutron [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1637.514869] env[63241]: DEBUG nova.compute.manager [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1637.556801] env[63241]: DEBUG nova.network.neutron [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Successfully created port: 5546d295-8d78-4143-b874-e6cc21c5945a {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.663798] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.722412] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820565, 'name': Rename_Task, 'duration_secs': 0.1597} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.722776] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1637.723092] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5bb80b09-8084-442b-bb43-d3b20a698c38 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.729537] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1637.729537] env[63241]: value = "task-1820566" [ 1637.729537] env[63241]: _type = "Task" [ 1637.729537] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.739515] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820566, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.846391] env[63241]: DEBUG oslo_concurrency.lockutils [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.998019] env[63241]: DEBUG nova.scheduler.client.report [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1638.240447] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820566, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.280646] env[63241]: DEBUG nova.network.neutron [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Updating instance_info_cache with network_info: [{"id": "871c0af7-a67d-4b89-b5aa-bc46ce52b6b5", "address": "fa:16:3e:5f:97:7e", "network": {"id": "3d384599-a1b0-4ab0-91ff-44e793fe0605", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1391741783-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa6725d063734b7183d5a1ef4106ef60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d5d0e0d-cdec-474a-a891-a9ceff15a8b2", "external-id": "nsx-vlan-transportzone-456", "segmentation_id": 456, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap871c0af7-a6", "ovs_interfaceid": "871c0af7-a67d-4b89-b5aa-bc46ce52b6b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.504010] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.511838] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.131s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.513197] env[63241]: DEBUG nova.objects.instance [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lazy-loading 'resources' on Instance uuid ef36a081-6273-4397-b48f-c2bd03d0a865 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1638.523086] env[63241]: DEBUG nova.compute.manager [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1638.535100] env[63241]: INFO nova.scheduler.client.report [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted allocations for instance 9e6ca606-383d-42f0-aea4-edecde33c1a4 [ 1638.557611] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1638.557899] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1638.558075] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1638.558266] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1638.558417] 
env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1638.558575] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1638.558855] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1638.559138] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1638.559396] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1638.559591] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1638.559773] env[63241]: DEBUG nova.virt.hardware [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1638.561078] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3425a7a-c2ec-4cf8-90a4-642c9c286e59 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.569985] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c699f8f3-e598-4884-81ec-4df70be9be2f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.740205] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820566, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.783069] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Releasing lock "refresh_cache-20c7a1a1-4396-414f-a52c-06551722b6eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.785356] env[63241]: DEBUG nova.compute.manager [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1638.786296] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2caa1ba-76cc-4f02-9929-9aee6fd9ef69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.043496] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b45bca-9934-47ac-82a4-695e61c0feac tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "9e6ca606-383d-42f0-aea4-edecde33c1a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.998s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.189318] env[63241]: DEBUG nova.network.neutron [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Successfully updated port: 5546d295-8d78-4143-b874-e6cc21c5945a {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1639.205513] env[63241]: DEBUG nova.compute.manager [req-e32d4419-0c57-40ab-b690-8a7508885f81 req-61eb0d43-7442-4287-a655-48ccb6e58e0c service nova] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Received event network-vif-plugged-5546d295-8d78-4143-b874-e6cc21c5945a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1639.206324] env[63241]: DEBUG oslo_concurrency.lockutils [req-e32d4419-0c57-40ab-b690-8a7508885f81 req-61eb0d43-7442-4287-a655-48ccb6e58e0c service nova] Acquiring lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.206732] env[63241]: DEBUG oslo_concurrency.lockutils [req-e32d4419-0c57-40ab-b690-8a7508885f81 req-61eb0d43-7442-4287-a655-48ccb6e58e0c service nova] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.207160] env[63241]: DEBUG oslo_concurrency.lockutils [req-e32d4419-0c57-40ab-b690-8a7508885f81 req-61eb0d43-7442-4287-a655-48ccb6e58e0c service nova] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.207396] env[63241]: DEBUG 
nova.compute.manager [req-e32d4419-0c57-40ab-b690-8a7508885f81 req-61eb0d43-7442-4287-a655-48ccb6e58e0c service nova] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] No waiting events found dispatching network-vif-plugged-5546d295-8d78-4143-b874-e6cc21c5945a {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1639.207723] env[63241]: WARNING nova.compute.manager [req-e32d4419-0c57-40ab-b690-8a7508885f81 req-61eb0d43-7442-4287-a655-48ccb6e58e0c service nova] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Received unexpected event network-vif-plugged-5546d295-8d78-4143-b874-e6cc21c5945a for instance with vm_state building and task_state spawning. [ 1639.244557] env[63241]: DEBUG oslo_vmware.api [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820566, 'name': PowerOnVM_Task, 'duration_secs': 1.433876} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.244877] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1639.245197] env[63241]: INFO nova.compute.manager [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Took 9.96 seconds to spawn the instance on the hypervisor. 
[ 1639.245440] env[63241]: DEBUG nova.compute.manager [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1639.246382] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a036844-aad7-423c-b99d-6a5e7ef27169 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.342264] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3adc07-65e5-4c68-bbb6-06367cba4537 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.350341] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a58b28-b170-4b78-b666-c46230d0e6e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.380373] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c579c0b-c4a0-4b37-ac47-dc6494841217 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.388152] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc597c7-5821-4d16-ac0b-0cfa6b334b55 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.401544] env[63241]: DEBUG nova.compute.provider_tree [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1639.695668] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.695668] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.695668] env[63241]: DEBUG nova.network.neutron [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1639.769112] env[63241]: INFO nova.compute.manager [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Took 41.56 seconds to build instance. 
[ 1639.808017] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de525273-ff56-4dce-8de1-8da199be5d57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.814207] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Doing hard reboot of VM {{(pid=63241) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1639.814663] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-03ff4561-0f44-478e-8dc1-dbe4f57ed038 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.820642] env[63241]: DEBUG oslo_vmware.api [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1639.820642] env[63241]: value = "task-1820567" [ 1639.820642] env[63241]: _type = "Task" [ 1639.820642] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.829175] env[63241]: DEBUG oslo_vmware.api [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820567, 'name': ResetVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.904526] env[63241]: DEBUG nova.scheduler.client.report [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1640.246187] env[63241]: DEBUG nova.network.neutron [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1640.272026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e50464d0-9af6-4e16-948c-9b8e07c53073 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.145s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.331043] env[63241]: DEBUG oslo_vmware.api [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820567, 'name': ResetVM_Task, 'duration_secs': 0.109147} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.331342] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Did hard reboot of VM {{(pid=63241) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1640.331948] env[63241]: DEBUG nova.compute.manager [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1640.332326] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9087f919-8a1d-4570-9f3b-cf68651029dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.400976] env[63241]: DEBUG nova.network.neutron [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance_info_cache with network_info: [{"id": "5546d295-8d78-4143-b874-e6cc21c5945a", "address": "fa:16:3e:8b:83:aa", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5546d295-8d", "ovs_interfaceid": "5546d295-8d78-4143-b874-e6cc21c5945a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1640.411211] env[63241]: 
DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.414486] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.885s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.416240] env[63241]: INFO nova.compute.claims [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1640.431516] env[63241]: INFO nova.scheduler.client.report [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Deleted allocations for instance ef36a081-6273-4397-b48f-c2bd03d0a865 [ 1640.512123] env[63241]: INFO nova.compute.manager [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Rescuing [ 1640.512521] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "refresh_cache-14af9f82-525e-453c-8dc5-ef5b13c67ee4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.512709] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "refresh_cache-14af9f82-525e-453c-8dc5-ef5b13c67ee4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.512975] env[63241]: DEBUG nova.network.neutron [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1640.549952] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.550224] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.844685] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61ad26b7-a2a0-4c92-8660-18ecd78f9ca8 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.863s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.903493] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.904056] env[63241]: DEBUG nova.compute.manager [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Instance network_info: |[{"id": "5546d295-8d78-4143-b874-e6cc21c5945a", "address": "fa:16:3e:8b:83:aa", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5546d295-8d", "ovs_interfaceid": "5546d295-8d78-4143-b874-e6cc21c5945a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1640.904549] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:83:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5546d295-8d78-4143-b874-e6cc21c5945a', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1640.912575] env[63241]: DEBUG oslo.service.loopingcall [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.912886] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1640.913169] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f72f26b-170e-464b-9686-ee9efdf876ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.935942] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1640.935942] env[63241]: value = "task-1820568" [ 1640.935942] env[63241]: _type = "Task" [ 1640.935942] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.944561] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b7ad09b7-f751-46e1-996d-78c66d1f4cf0 tempest-ServersAaction247Test-168031357 tempest-ServersAaction247Test-168031357-project-member] Lock "ef36a081-6273-4397-b48f-c2bd03d0a865" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.269s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.950411] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820568, 'name': CreateVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.052976] env[63241]: DEBUG nova.compute.manager [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1641.246587] env[63241]: DEBUG nova.network.neutron [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Updating instance_info_cache with network_info: [{"id": "f11430c6-423c-4742-b139-54246d031151", "address": "fa:16:3e:5f:48:28", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf11430c6-42", "ovs_interfaceid": "f11430c6-423c-4742-b139-54246d031151", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.376897] env[63241]: DEBUG nova.compute.manager [req-69fd89f5-ddb3-48d3-9766-fad294a5122c req-9da14743-b9c8-4ac6-abe9-88e993bfee41 service nova] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Received event network-changed-5546d295-8d78-4143-b874-e6cc21c5945a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1641.377120] env[63241]: DEBUG nova.compute.manager [req-69fd89f5-ddb3-48d3-9766-fad294a5122c req-9da14743-b9c8-4ac6-abe9-88e993bfee41 service nova] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Refreshing instance network info cache due to event network-changed-5546d295-8d78-4143-b874-e6cc21c5945a. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1641.377361] env[63241]: DEBUG oslo_concurrency.lockutils [req-69fd89f5-ddb3-48d3-9766-fad294a5122c req-9da14743-b9c8-4ac6-abe9-88e993bfee41 service nova] Acquiring lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.377830] env[63241]: DEBUG oslo_concurrency.lockutils [req-69fd89f5-ddb3-48d3-9766-fad294a5122c req-9da14743-b9c8-4ac6-abe9-88e993bfee41 service nova] Acquired lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.378123] env[63241]: DEBUG nova.network.neutron [req-69fd89f5-ddb3-48d3-9766-fad294a5122c req-9da14743-b9c8-4ac6-abe9-88e993bfee41 service nova] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Refreshing network info cache for port 5546d295-8d78-4143-b874-e6cc21c5945a {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1641.447190] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820568, 'name': CreateVM_Task, 'duration_secs': 0.497164} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.447190] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1641.448031] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.448144] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.448454] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1641.448756] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64c661e4-a4bc-4bd5-8b6c-681b7846116b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.454224] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1641.454224] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520f6122-0070-ead0-ee3f-ef8bc1884c50" [ 1641.454224] env[63241]: _type = "Task" [ 1641.454224] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.466292] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520f6122-0070-ead0-ee3f-ef8bc1884c50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.577301] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.743461] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc26284-f773-4a53-94a2-d132e4812fa0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.752422] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "refresh_cache-14af9f82-525e-453c-8dc5-ef5b13c67ee4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.755727] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa89927-f682-43ef-967c-0203332e8198 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.791588] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fda5bb0-4027-4590-9af4-e786c4a5c37d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.800241] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c07103-5bc0-4540-8d6d-e7614611e5fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.818624] env[63241]: DEBUG nova.compute.provider_tree [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1641.966543] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520f6122-0070-ead0-ee3f-ef8bc1884c50, 'name': SearchDatastore_Task, 'duration_secs': 0.019532} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.969258] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.969519] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1641.969771] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.969937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.970145] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1641.970426] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09b289c8-af0e-4a24-ac92-b59eda1356c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.979945] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1641.980172] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1641.980990] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b22a3fa0-1a1a-42ce-af1f-a13da94e4a71 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.986428] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1641.986428] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5262ea7e-a962-4b32-0c43-ff3fa17a242a" [ 1641.986428] env[63241]: _type = "Task" [ 1641.986428] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.996765] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5262ea7e-a962-4b32-0c43-ff3fa17a242a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.057276] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "20c7a1a1-4396-414f-a52c-06551722b6eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.057546] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.057789] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "20c7a1a1-4396-414f-a52c-06551722b6eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.058017] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.058206] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.061915] env[63241]: INFO nova.compute.manager [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Terminating instance [ 1642.064050] env[63241]: DEBUG nova.compute.manager [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1642.064292] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1642.065219] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053007d4-5299-4271-99eb-a582608c915f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.073543] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1642.073875] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83f955ed-3644-440b-87e9-979264740849 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.080973] env[63241]: DEBUG oslo_vmware.api [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1642.080973] env[63241]: value = "task-1820569" [ 1642.080973] env[63241]: _type = "Task" [ 1642.080973] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.092394] env[63241]: DEBUG oslo_vmware.api [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820569, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.124916] env[63241]: DEBUG nova.network.neutron [req-69fd89f5-ddb3-48d3-9766-fad294a5122c req-9da14743-b9c8-4ac6-abe9-88e993bfee41 service nova] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updated VIF entry in instance network info cache for port 5546d295-8d78-4143-b874-e6cc21c5945a. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1642.125345] env[63241]: DEBUG nova.network.neutron [req-69fd89f5-ddb3-48d3-9766-fad294a5122c req-9da14743-b9c8-4ac6-abe9-88e993bfee41 service nova] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance_info_cache with network_info: [{"id": "5546d295-8d78-4143-b874-e6cc21c5945a", "address": "fa:16:3e:8b:83:aa", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5546d295-8d", "ovs_interfaceid": "5546d295-8d78-4143-b874-e6cc21c5945a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1642.313628] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1642.314068] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b33f106-eb79-4cb1-822a-993981918f84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.322609] env[63241]: DEBUG nova.scheduler.client.report [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1642.327111] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1642.327111] env[63241]: value = "task-1820570" [ 1642.327111] env[63241]: _type = "Task" [ 1642.327111] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.336568] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820570, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.498408] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5262ea7e-a962-4b32-0c43-ff3fa17a242a, 'name': SearchDatastore_Task, 'duration_secs': 0.015649} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.499254] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-548e70e4-4f39-45e2-9f80-36085cfde604 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.505429] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1642.505429] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529553e1-f06e-82b8-cc8a-d9d5f46ec2c3" [ 1642.505429] env[63241]: _type = "Task" [ 1642.505429] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.515055] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529553e1-f06e-82b8-cc8a-d9d5f46ec2c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.590680] env[63241]: DEBUG oslo_vmware.api [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820569, 'name': PowerOffVM_Task, 'duration_secs': 0.497512} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.591086] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1642.591258] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1642.591532] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13dc6bab-a67c-4b23-b240-ff069222f038 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.627711] env[63241]: DEBUG oslo_concurrency.lockutils [req-69fd89f5-ddb3-48d3-9766-fad294a5122c req-9da14743-b9c8-4ac6-abe9-88e993bfee41 service nova] Releasing lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.664813] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1642.664962] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1642.665184] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Deleting the datastore file [datastore1] 20c7a1a1-4396-414f-a52c-06551722b6eb {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1642.665438] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7d4c49a-5684-49bc-be92-5231d52c7b4d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.672594] env[63241]: DEBUG oslo_vmware.api [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for the task: (returnval){ [ 1642.672594] env[63241]: value = "task-1820572" [ 1642.672594] env[63241]: _type = "Task" [ 1642.672594] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.681348] env[63241]: DEBUG oslo_vmware.api [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820572, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.828666] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.829289] env[63241]: DEBUG nova.compute.manager [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1642.832330] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.833s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.832544] env[63241]: DEBUG nova.objects.instance [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lazy-loading 'resources' on Instance uuid 0e5447fd-a04f-4bc2-b329-e015883773b8 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1642.842103] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820570, 'name': PowerOffVM_Task, 'duration_secs': 0.261841} completed successfully. 
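
Annotation: the lockutils entries around "compute_resources" report two durations per critical section: how long the caller waited to acquire the lock and how long it held it (here instance_claim held it 2.414s while update_usage had waited 12.833s). The sketch below shows one way such timings can be measured around a lock; it uses a plain threading.Lock and is only an illustration of the reporting, not the oslo.concurrency implementation.

    import contextlib
    import threading
    import time

    _locks = {}

    @contextlib.contextmanager
    def timed_lock(name):
        # Report wait/held durations the way the lockutils entries do (illustrative).
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print('Lock "%s" acquired :: waited %.3fs' % (name, acquired - t0))
        try:
            yield
        finally:
            held = time.monotonic() - acquired
            lock.release()
            print('Lock "%s" released :: held %.3fs' % (name, held))

    if __name__ == '__main__':
        with timed_lock('compute_resources'):
            time.sleep(0.05)    # stands in for the resource-tracker work done under the lock
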
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.842373] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1642.843206] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1946d708-a9be-41c4-81ea-e906e0e83587 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.862757] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7f2ef4-60a0-46b2-9d3b-1639f27ddbc2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.891559] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1642.891875] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fb3bfee-1185-4ff8-a1ab-1a2a95bda3b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.901085] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "1dc98fbd-a52b-42fa-8d37-d14318dbc941" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.901331] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "1dc98fbd-a52b-42fa-8d37-d14318dbc941" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.902749] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1642.902749] env[63241]: value = "task-1820573" [ 1642.902749] env[63241]: _type = "Task" [ 1642.902749] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.912217] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1642.912494] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1642.912818] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.016013] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529553e1-f06e-82b8-cc8a-d9d5f46ec2c3, 'name': SearchDatastore_Task, 'duration_secs': 0.020951} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.016315] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.016578] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] cb7eb689-b8f6-479d-aa6b-c27fab16e131/cb7eb689-b8f6-479d-aa6b-c27fab16e131.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1643.016858] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.017064] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1643.017296] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e75baf8e-10d0-41fe-a949-9ecea400701b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.019226] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-051498da-9974-4720-98fa-6dd13f820bbd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.026067] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1643.026067] env[63241]: value = "task-1820574" [ 1643.026067] env[63241]: _type = "Task" [ 1643.026067] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.030339] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1643.030542] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1643.032143] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbce8d6e-a733-42e3-be63-aae5c32ca420 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.037194] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820574, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.040108] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1643.040108] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d92806-bb51-ed6a-c3e2-c6e36b72a20c" [ 1643.040108] env[63241]: _type = "Task" [ 1643.040108] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.047377] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d92806-bb51-ed6a-c3e2-c6e36b72a20c, 'name': SearchDatastore_Task} progress is 0%. 
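
Annotation: the entries above show the image-cache flow: the driver searches the datastore for the cached base image (e128f8d9-...vmdk), creates the devstack-image-cache_base directory if it is missing (MakeDirectory), and copies the cached VMDK into the instance's own directory (CopyVirtualDisk_Task). The sketch below is a local-filesystem analogue of that check/create/copy pattern only; the paths are made up and real Nova performs each step as a vCenter task.

    import pathlib
    import shutil

    def copy_cached_image(cache_dir, image_id, instance_dir):
        cache_dir = pathlib.Path(cache_dir)
        instance_dir = pathlib.Path(instance_dir)
        cache_dir.mkdir(parents=True, exist_ok=True)        # MakeDirectory step
        src = cache_dir / ('%s.vmdk' % image_id)
        if not src.exists():                                # SearchDatastore_Task analogue
            raise FileNotFoundError('base image %s not in cache' % image_id)
        instance_dir.mkdir(parents=True, exist_ok=True)
        dst = instance_dir / ('%s.vmdk' % instance_dir.name)
        shutil.copyfile(src, dst)                           # CopyVirtualDisk_Task analogue
        return dst
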
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.127824] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "4a57d04b-72a0-4db3-8119-994b67e4b096" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.128108] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "4a57d04b-72a0-4db3-8119-994b67e4b096" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.182949] env[63241]: DEBUG oslo_vmware.api [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Task: {'id': task-1820572, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161393} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.183316] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1643.183452] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1643.183659] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1643.183898] env[63241]: INFO nova.compute.manager [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1643.184197] env[63241]: DEBUG oslo.service.loopingcall [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
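
Annotation: after the instance is destroyed, network deallocation runs inside a looping call ("Waiting for function ... _deallocate_network_with_retries to return"), i.e. the deallocation is retried at intervals until it succeeds or retries run out. The sketch below is a generic retry loop in that spirit; the interval, retry count, and the broad exception handling are assumptions for the example, not the oslo.service LoopingCall API.

    import time

    def call_with_retries(func, max_retries=3, interval=1.0):
        # Retry `func` at a fixed interval until it succeeds (illustrative).
        for attempt in range(1, max_retries + 1):
            try:
                return func()
            except Exception as exc:          # real code would catch specific errors
                if attempt == max_retries:
                    raise
                print('attempt %d failed (%s), retrying in %.1fs' % (attempt, exc, interval))
                time.sleep(interval)

    if __name__ == '__main__':
        calls = {'n': 0}
        def flaky_deallocate():
            calls['n'] += 1
            if calls['n'] < 2:
                raise RuntimeError('neutron temporarily unavailable')
            return 'deallocated'
        print(call_with_retries(flaky_deallocate, interval=0.01))
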
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1643.184394] env[63241]: DEBUG nova.compute.manager [-] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1643.184493] env[63241]: DEBUG nova.network.neutron [-] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1643.342184] env[63241]: DEBUG nova.compute.utils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1643.344729] env[63241]: DEBUG nova.compute.manager [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1643.344729] env[63241]: DEBUG nova.network.neutron [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1643.405659] env[63241]: DEBUG nova.compute.manager [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1643.420171] env[63241]: DEBUG nova.policy [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '14613a184b484917816fe8d185332aa0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9065e95ad1fe4fc1a9fd588c07f5609c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1643.538032] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820574, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.553159] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d92806-bb51-ed6a-c3e2-c6e36b72a20c, 'name': SearchDatastore_Task, 'duration_secs': 0.010282} completed successfully. 
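
Annotation: the effc3987 build above allocates its network in the background ("Allocating IP information in the background", allocate_for_instance()) while the compute manager carries on with the rest of the build; the block device mapping entries appear a bit further down while the port is still being created. The sketch below only illustrates that overlap-then-join idea with a thread pool; the helper functions and timings are invented for the example and this is not Nova's _allocate_network_async.

    from concurrent.futures import ThreadPoolExecutor
    import time

    def allocate_network(instance_uuid):
        time.sleep(0.05)                      # stands in for Neutron port creation
        return {'port_id': 'e6572a16-56c8-4c47-99ab-22d12f1f2ffc'}

    def build_block_device_mappings(instance_uuid):
        time.sleep(0.02)                      # stands in for BDM preparation
        return ['/dev/sda']

    if __name__ == '__main__':
        uuid = 'effc3987-45d0-4305-83a2-0eba47d2c7fd'
        with ThreadPoolExecutor(max_workers=1) as pool:
            nw_future = pool.submit(allocate_network, uuid)   # background allocation
            bdms = build_block_device_mappings(uuid)          # foreground continues
            network_info = nw_future.result()                 # joined before spawning
        print(bdms, network_info)
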
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.554322] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b6f33fb-6141-4f71-aa45-c8e22775c282 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.565038] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1643.565038] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52983cb4-7a31-7381-29d2-3400976e28fc" [ 1643.565038] env[63241]: _type = "Task" [ 1643.565038] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.576583] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52983cb4-7a31-7381-29d2-3400976e28fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.630538] env[63241]: DEBUG nova.compute.manager [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1643.736268] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b58ef93-4f1d-4644-bb31-154afd36f8c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.746091] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986aee6d-528d-4194-ac49-94fc583eb5de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.751595] env[63241]: DEBUG nova.compute.manager [req-6a4d6ec0-8ec6-4d87-b6af-858b9b47e232 req-6ea5a3f5-f781-4875-839e-fcdf4cf2835e service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Received event network-vif-deleted-871c0af7-a67d-4b89-b5aa-bc46ce52b6b5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1643.751796] env[63241]: INFO nova.compute.manager [req-6a4d6ec0-8ec6-4d87-b6af-858b9b47e232 req-6ea5a3f5-f781-4875-839e-fcdf4cf2835e service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Neutron deleted interface 871c0af7-a67d-4b89-b5aa-bc46ce52b6b5; detaching it from the instance and deleting it from the info cache [ 1643.751973] env[63241]: DEBUG nova.network.neutron [req-6a4d6ec0-8ec6-4d87-b6af-858b9b47e232 req-6ea5a3f5-f781-4875-839e-fcdf4cf2835e service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.786282] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4723d072-f0db-40b9-b333-5cc17eb69915 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.795756] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20eddcef-357a-4bc5-ae0c-35e6f8168988 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.811957] env[63241]: DEBUG nova.compute.provider_tree [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1643.833775] env[63241]: DEBUG nova.network.neutron [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Successfully created port: e6572a16-56c8-4c47-99ab-22d12f1f2ffc {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1643.847654] env[63241]: DEBUG nova.compute.manager [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1643.929790] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.038186] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820574, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577456} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.038551] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] cb7eb689-b8f6-479d-aa6b-c27fab16e131/cb7eb689-b8f6-479d-aa6b-c27fab16e131.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1644.038933] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1644.039142] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7535e470-0862-4e83-a1ac-0ebdc3988b8f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.046513] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1644.046513] env[63241]: value = "task-1820575" [ 1644.046513] env[63241]: _type = "Task" [ 1644.046513] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.055154] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820575, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.075668] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52983cb4-7a31-7381-29d2-3400976e28fc, 'name': SearchDatastore_Task, 'duration_secs': 0.026312} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.075976] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.076285] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 14af9f82-525e-453c-8dc5-ef5b13c67ee4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. 
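
Annotation: "Extending root virtual disk to 1048576" above is consistent with a 1 GiB root disk expressed in KiB; the m1.nano flavor later in the log has root_gb=1. The unit interpretation is an assumption based on that flavor value; the check below is just the conversion.

    # 1 GiB root disk (root_gb=1) expressed in KiB matches the logged target size.
    root_gb = 1
    size_kb = root_gb * 1024 * 1024
    assert size_kb == 1048576
    print(size_kb)
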
{{(pid=63241) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1644.076617] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18a8925a-ad90-4a14-8e48-833dd7daa56b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.083845] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1644.083845] env[63241]: value = "task-1820576" [ 1644.083845] env[63241]: _type = "Task" [ 1644.083845] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.092984] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820576, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.163043] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.186808] env[63241]: DEBUG nova.network.neutron [-] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.255180] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2cb1db03-6b55-4314-8342-1ab2afc0304d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.267970] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069adc7a-f022-487e-ab91-5e2fdeb06d1a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.310245] env[63241]: DEBUG nova.compute.manager [req-6a4d6ec0-8ec6-4d87-b6af-858b9b47e232 req-6ea5a3f5-f781-4875-839e-fcdf4cf2835e service nova] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Detach interface failed, port_id=871c0af7-a67d-4b89-b5aa-bc46ce52b6b5, reason: Instance 20c7a1a1-4396-414f-a52c-06551722b6eb could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1644.316718] env[63241]: DEBUG nova.scheduler.client.report [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1644.418571] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235e0f1-ad73-4c09-32cd-a7cdc57d64c8/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1644.419758] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7fa912-34ed-469c-94e3-ea5018befacb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.426707] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235e0f1-ad73-4c09-32cd-a7cdc57d64c8/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1644.426900] env[63241]: ERROR oslo_vmware.rw_handles [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235e0f1-ad73-4c09-32cd-a7cdc57d64c8/disk-0.vmdk due to incomplete transfer. [ 1644.427173] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-88b3cbf5-1371-4ac7-967b-50718c044129 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.434819] env[63241]: DEBUG oslo_vmware.rw_handles [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235e0f1-ad73-4c09-32cd-a7cdc57d64c8/disk-0.vmdk. 
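
Annotation: the scheduler report entry above restates the provider inventory. The schedulable capacity Placement derives for each resource class is (total - reserved) * allocation_ratio, which with the logged values gives 192 VCPU, 196078 MB of RAM and 400 GB of disk; the snippet below just re-does that arithmetic.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    # Effective schedulable capacity per resource class: (total - reserved) * ratio.
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
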
{{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1644.435052] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Uploaded image 80f341e6-b28c-4c17-98c5-f9d084a69516 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1644.437379] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1644.437656] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-322857ba-4fc0-45c4-b892-85fd9a323f79 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.445349] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1644.445349] env[63241]: value = "task-1820577" [ 1644.445349] env[63241]: _type = "Task" [ 1644.445349] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.454601] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820577, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.556413] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820575, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114858} completed successfully. 
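
Annotation: the entries just above show the tail of a stream-optimized image upload: the read handle checks the HttpNfcLease state, aborts the lease because the transfer was not marked complete, closes the VMDK read handle, and the image is then reported to Glance. The sketch below only illustrates that complete-or-abort-then-always-close discipline; the `lease` object, its methods, and the byte counters are hypothetical, not the oslo.vmware rw_handles API.

    def close_read_handle(lease, bytes_expected, bytes_transferred, close_fn):
        # Hypothetical lease with state()/complete()/abort(); shape of the close path
        # suggested by the log, not the actual implementation.
        try:
            if lease.state() == 'ready':
                if bytes_transferred == bytes_expected:
                    lease.complete()      # transfer finished cleanly
                else:
                    lease.abort()         # "Aborting lease ... due to incomplete transfer"
        finally:
            close_fn()                    # the VMDK read handle is always closed
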
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.556709] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1644.557566] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce449566-435b-4d42-a172-237e43faa959 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.580252] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] cb7eb689-b8f6-479d-aa6b-c27fab16e131/cb7eb689-b8f6-479d-aa6b-c27fab16e131.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1644.580609] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5f02bd4-88a5-48fa-9403-f060397c3f4f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.604282] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820576, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.605870] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1644.605870] env[63241]: value = "task-1820578" [ 1644.605870] env[63241]: _type = "Task" [ 1644.605870] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.614680] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820578, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.690682] env[63241]: INFO nova.compute.manager [-] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Took 1.51 seconds to deallocate network for instance. 
[ 1644.823265] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.990s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.825388] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.336s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.827068] env[63241]: INFO nova.compute.claims [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1644.850745] env[63241]: INFO nova.scheduler.client.report [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted allocations for instance 0e5447fd-a04f-4bc2-b329-e015883773b8 [ 1644.858179] env[63241]: DEBUG nova.compute.manager [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1644.884326] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1644.884581] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1644.884848] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1644.884848] env[63241]: DEBUG nova.virt.hardware [None 
req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1644.885033] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1644.885203] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1644.885435] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1644.885599] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1644.885773] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1644.885954] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1644.886211] env[63241]: DEBUG nova.virt.hardware [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1644.887465] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b885bc3-c767-4080-a3b9-5dcfa5a3b45e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.896065] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084a75d7-b441-498b-b6f2-df0cce3016d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.954694] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820577, 'name': Destroy_Task, 'duration_secs': 0.468099} completed successfully. 
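
Annotation: the hardware.py entries above enumerate CPU topologies for the m1.nano flavor: with 1 vCPU and no flavor or image limits (the 65536 defaults), the only (sockets, cores, threads) factorisation is 1:1:1, which is why exactly one possible topology is reported. The brute-force sketch below illustrates just that factorisation step, not Nova's preference and ordering logic.

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # All (sockets, cores, threads) whose product equals vcpus, within the limits.
        topos = []
        for s, c, t in itertools.product(range(1, min(vcpus, max_sockets) + 1),
                                         range(1, min(vcpus, max_cores) + 1),
                                         range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                topos.append((s, c, t))
        return topos

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology logged above
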
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.954976] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Destroyed the VM [ 1644.955322] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1644.955588] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-46e81487-82db-4359-8861-7fb0dc7b923e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.961526] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1644.961526] env[63241]: value = "task-1820579" [ 1644.961526] env[63241]: _type = "Task" [ 1644.961526] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.969475] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820579, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.104875] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820576, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556646} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.105145] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 14af9f82-525e-453c-8dc5-ef5b13c67ee4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. 
[ 1645.105900] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc2bb72-f90d-4215-b236-d38bed787af0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.133156] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 14af9f82-525e-453c-8dc5-ef5b13c67ee4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1645.136277] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3aacc27-eafe-4c50-9370-138ffff9a6f8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.148532] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820578, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.153335] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1645.153335] env[63241]: value = "task-1820580" [ 1645.153335] env[63241]: _type = "Task" [ 1645.153335] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.162223] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820580, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.198711] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.360496] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7e8e231-49db-4380-92be-f1f3ae3f2f35 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "0e5447fd-a04f-4bc2-b329-e015883773b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.537s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.473280] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820579, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.523125] env[63241]: DEBUG nova.compute.manager [req-305e12dc-a062-45c3-bc79-4ee3eda0f468 req-52981684-a4d7-477d-996a-c8dd79cb8df8 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Received event network-vif-plugged-e6572a16-56c8-4c47-99ab-22d12f1f2ffc {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1645.523371] env[63241]: DEBUG oslo_concurrency.lockutils [req-305e12dc-a062-45c3-bc79-4ee3eda0f468 req-52981684-a4d7-477d-996a-c8dd79cb8df8 service nova] Acquiring lock "effc3987-45d0-4305-83a2-0eba47d2c7fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.523581] env[63241]: DEBUG oslo_concurrency.lockutils [req-305e12dc-a062-45c3-bc79-4ee3eda0f468 req-52981684-a4d7-477d-996a-c8dd79cb8df8 service nova] Lock "effc3987-45d0-4305-83a2-0eba47d2c7fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.523749] env[63241]: DEBUG oslo_concurrency.lockutils [req-305e12dc-a062-45c3-bc79-4ee3eda0f468 req-52981684-a4d7-477d-996a-c8dd79cb8df8 service nova] Lock "effc3987-45d0-4305-83a2-0eba47d2c7fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.523920] env[63241]: DEBUG nova.compute.manager [req-305e12dc-a062-45c3-bc79-4ee3eda0f468 req-52981684-a4d7-477d-996a-c8dd79cb8df8 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] No waiting events found dispatching network-vif-plugged-e6572a16-56c8-4c47-99ab-22d12f1f2ffc {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1645.524096] env[63241]: WARNING nova.compute.manager [req-305e12dc-a062-45c3-bc79-4ee3eda0f468 req-52981684-a4d7-477d-996a-c8dd79cb8df8 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Received unexpected event network-vif-plugged-e6572a16-56c8-4c47-99ab-22d12f1f2ffc for instance with vm_state building and task_state spawning. [ 1645.621823] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820578, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.625440] env[63241]: DEBUG nova.network.neutron [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Successfully updated port: e6572a16-56c8-4c47-99ab-22d12f1f2ffc {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1645.663064] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820580, 'name': ReconfigVM_Task, 'duration_secs': 0.434143} completed successfully. 
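
Annotation: the network-vif-plugged entries above show how an external Neutron event is dispatched: a short per-instance "-events" lock is taken, any waiter registered for that event name is popped, and when nothing is waiting yet (the instance is still building) the "Received unexpected event" warning is logged instead. The dictionary-based sketch below is a simplification of that pop-or-warn pattern only, not nova.compute.manager.InstanceEvents.

    import threading

    class InstanceEventsSketch:
        # Simplified pop-or-warn dispatch for external instance events (illustrative).

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}     # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            with self._lock:
                ev = threading.Event()
                self._waiters[(instance_uuid, event_name)] = ev
                return ev

        def pop_event(self, instance_uuid, event_name):
            with self._lock:       # the per-instance "-events" lock in the log
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                print('WARNING: received unexpected event %s for %s'
                      % (event_name, instance_uuid))
            else:
                waiter.set()       # wake whoever armed the wait for this event

    events = InstanceEventsSketch()
    events.pop_event('effc3987-45d0-4305-83a2-0eba47d2c7fd',
                     'network-vif-plugged-e6572a16-56c8-4c47-99ab-22d12f1f2ffc')
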
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.663648] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 14af9f82-525e-453c-8dc5-ef5b13c67ee4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1645.664517] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c60538-93b2-41a1-ba9e-2c0d238a6938 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.689082] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-099c407b-c5de-4382-b1ea-9ba7ca627220 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.706232] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1645.706232] env[63241]: value = "task-1820581" [ 1645.706232] env[63241]: _type = "Task" [ 1645.706232] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.715434] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820581, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.972055] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820579, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.996761] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "a1a8342a-b00e-42c1-8c01-a95659a78caf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.997037] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "a1a8342a-b00e-42c1-8c01-a95659a78caf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.997260] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "a1a8342a-b00e-42c1-8c01-a95659a78caf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.997460] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "a1a8342a-b00e-42c1-8c01-a95659a78caf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.997630] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "a1a8342a-b00e-42c1-8c01-a95659a78caf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.002837] env[63241]: INFO nova.compute.manager [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Terminating instance [ 1646.004790] env[63241]: DEBUG nova.compute.manager [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1646.005012] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1646.005887] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54b8759-57cb-48fe-add4-af6087436851 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.015572] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1646.015801] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ae9eef4-05f6-4f3b-93f2-e9d5578cf732 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.021625] env[63241]: DEBUG oslo_vmware.api [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1646.021625] env[63241]: value = "task-1820582" [ 1646.021625] env[63241]: _type = "Task" [ 1646.021625] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.029599] env[63241]: DEBUG oslo_vmware.api [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820582, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.122187] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820578, 'name': ReconfigVM_Task, 'duration_secs': 1.365098} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.123365] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Reconfigured VM instance instance-00000045 to attach disk [datastore1] cb7eb689-b8f6-479d-aa6b-c27fab16e131/cb7eb689-b8f6-479d-aa6b-c27fab16e131.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1646.124968] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff3d82c-0507-4caa-9ec8-624287d69268 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.129782] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dfd6296e-d897-42aa-92fd-2f533e212139 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.130406] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquiring lock "refresh_cache-effc3987-45d0-4305-83a2-0eba47d2c7fd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.130661] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquired lock "refresh_cache-effc3987-45d0-4305-83a2-0eba47d2c7fd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.130835] env[63241]: DEBUG nova.network.neutron [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1646.136892] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2914225c-a2e2-40d6-b492-aa42cc111e63 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.142029] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1646.142029] env[63241]: value = "task-1820583" [ 1646.142029] env[63241]: _type = "Task" [ 1646.142029] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.174151] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a59fcc4-17c1-4d47-a119-5d13c853a68a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.180755] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820583, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.186324] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1676c3d4-f3bb-43d5-aded-b5608480ff4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.200604] env[63241]: DEBUG nova.compute.provider_tree [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.216335] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820581, 'name': ReconfigVM_Task, 'duration_secs': 0.163024} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.216595] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1646.216835] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c9fe4fb-7468-4bd3-90ad-0552f930b46a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.222019] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1646.222019] env[63241]: value = "task-1820584" [ 1646.222019] env[63241]: _type = "Task" [ 1646.222019] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.229474] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820584, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.473748] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820579, 'name': RemoveSnapshot_Task, 'duration_secs': 1.297743} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.474119] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1646.474414] env[63241]: DEBUG nova.compute.manager [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1646.475197] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70f92b7-3208-4a30-992f-89eda18a30f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.531977] env[63241]: DEBUG oslo_vmware.api [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820582, 'name': PowerOffVM_Task, 'duration_secs': 0.196896} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.532208] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1646.532381] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1646.532630] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-192b7487-b4f9-49e1-b444-df07a36e00f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.652446] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820583, 'name': Rename_Task, 'duration_secs': 0.141968} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.652898] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1646.653019] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0bb2ef7-d423-4c35-a776-4269b03423c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.659908] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1646.659908] env[63241]: value = "task-1820586" [ 1646.659908] env[63241]: _type = "Task" [ 1646.659908] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.665245] env[63241]: DEBUG nova.network.neutron [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1646.669783] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820586, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.704053] env[63241]: DEBUG nova.scheduler.client.report [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1646.716738] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1646.716963] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1646.717168] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleting the datastore file [datastore1] a1a8342a-b00e-42c1-8c01-a95659a78caf {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1646.726626] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9f82065-ead3-4439-bc23-73fcd2a0300b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.726626] env[63241]: DEBUG oslo_vmware.api [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for the task: (returnval){ [ 1646.726626] env[63241]: value = "task-1820587" [ 1646.726626] env[63241]: _type = "Task" [ 1646.726626] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.735809] env[63241]: DEBUG oslo_vmware.api [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820584, 'name': PowerOnVM_Task, 'duration_secs': 0.392901} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.739338] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1646.741645] env[63241]: DEBUG oslo_vmware.api [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820587, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.742687] env[63241]: DEBUG nova.compute.manager [None req-b2988e88-960d-4a05-9b65-b9ce37638c5a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1646.743567] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c717c9-1a1e-4da2-99e5-310a45efbb89 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.874196] env[63241]: DEBUG nova.network.neutron [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Updating instance_info_cache with network_info: [{"id": "e6572a16-56c8-4c47-99ab-22d12f1f2ffc", "address": "fa:16:3e:9a:1b:e1", "network": {"id": "03cda1c6-e368-476f-84bd-f777dcc7a84b", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1396456098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9065e95ad1fe4fc1a9fd588c07f5609c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6572a16-56", "ovs_interfaceid": "e6572a16-56c8-4c47-99ab-22d12f1f2ffc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.987233] env[63241]: INFO nova.compute.manager [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Shelve offloading [ 1646.988793] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Powering 
off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1646.989041] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62c25c86-2f88-4577-960d-ef844f22c349 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.996755] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1646.996755] env[63241]: value = "task-1820588" [ 1646.996755] env[63241]: _type = "Task" [ 1646.996755] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.006352] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820588, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.171801] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820586, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.211349] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.211922] env[63241]: DEBUG nova.compute.manager [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1647.214956] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.310s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.215245] env[63241]: DEBUG nova.objects.instance [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lazy-loading 'resources' on Instance uuid cfdc6b34-6940-414f-b17d-6fe17f92474a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1647.236627] env[63241]: DEBUG oslo_vmware.api [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Task: {'id': task-1820587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330753} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.236869] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1647.237068] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1647.237256] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1647.237495] env[63241]: INFO nova.compute.manager [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1647.237773] env[63241]: DEBUG oslo.service.loopingcall [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1647.237968] env[63241]: DEBUG nova.compute.manager [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1647.238076] env[63241]: DEBUG nova.network.neutron [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1647.377097] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Releasing lock "refresh_cache-effc3987-45d0-4305-83a2-0eba47d2c7fd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.377481] env[63241]: DEBUG nova.compute.manager [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Instance network_info: |[{"id": "e6572a16-56c8-4c47-99ab-22d12f1f2ffc", "address": "fa:16:3e:9a:1b:e1", "network": {"id": "03cda1c6-e368-476f-84bd-f777dcc7a84b", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1396456098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"9065e95ad1fe4fc1a9fd588c07f5609c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6572a16-56", "ovs_interfaceid": "e6572a16-56c8-4c47-99ab-22d12f1f2ffc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1647.377944] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:1b:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6572a16-56c8-4c47-99ab-22d12f1f2ffc', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1647.386254] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Creating folder: Project (9065e95ad1fe4fc1a9fd588c07f5609c). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1647.386469] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a7e2f97-864f-4cd1-bcbb-ce755847d826 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.397558] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Created folder: Project (9065e95ad1fe4fc1a9fd588c07f5609c) in parent group-v376927. [ 1647.397741] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Creating folder: Instances. Parent ref: group-v377120. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1647.397959] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffc53fd3-04bf-43df-8c67-16d750278e42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.406532] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Created folder: Instances in parent group-v377120. [ 1647.406715] env[63241]: DEBUG oslo.service.loopingcall [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1647.406895] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1647.407105] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-305ec09d-525e-43ae-95ee-d45d27b845cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.426179] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1647.426179] env[63241]: value = "task-1820591" [ 1647.426179] env[63241]: _type = "Task" [ 1647.426179] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.433847] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820591, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.508031] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1647.508031] env[63241]: DEBUG nova.compute.manager [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1647.508765] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43833f27-e49c-4eb1-9a2a-c1ff4e584817 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.515549] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.515752] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.515966] env[63241]: DEBUG nova.network.neutron [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1647.610176] env[63241]: DEBUG nova.compute.manager [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Received event network-changed-e6572a16-56c8-4c47-99ab-22d12f1f2ffc {{(pid=63241) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1647.610387] env[63241]: DEBUG nova.compute.manager [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Refreshing instance network info cache due to event network-changed-e6572a16-56c8-4c47-99ab-22d12f1f2ffc. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1647.610599] env[63241]: DEBUG oslo_concurrency.lockutils [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] Acquiring lock "refresh_cache-effc3987-45d0-4305-83a2-0eba47d2c7fd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.610739] env[63241]: DEBUG oslo_concurrency.lockutils [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] Acquired lock "refresh_cache-effc3987-45d0-4305-83a2-0eba47d2c7fd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.610907] env[63241]: DEBUG nova.network.neutron [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Refreshing network info cache for port e6572a16-56c8-4c47-99ab-22d12f1f2ffc {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1647.671651] env[63241]: DEBUG oslo_vmware.api [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820586, 'name': PowerOnVM_Task, 'duration_secs': 0.571926} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.671913] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1647.672125] env[63241]: INFO nova.compute.manager [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Took 9.15 seconds to spawn the instance on the hypervisor. 
[ 1647.672309] env[63241]: DEBUG nova.compute.manager [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1647.673097] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2509b510-ae32-4e8c-a89f-e63b256a6aea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.718634] env[63241]: DEBUG nova.compute.utils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1647.723993] env[63241]: DEBUG nova.compute.manager [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1647.723993] env[63241]: DEBUG nova.network.neutron [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1647.791950] env[63241]: DEBUG nova.policy [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ff916a4e9c6433e939d47c887fa3b98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bfa018174324b20863367a034d512da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1647.936730] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820591, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.939536] env[63241]: DEBUG nova.network.neutron [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1648.097607] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813226b8-86d2-45cb-b30a-60b127b16add {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.106618] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f35370-1e0a-48ae-90d6-c7b346057c03 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.142642] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b3a844-a03a-4a23-af9e-50fd3deb19f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.151444] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a96a27-81c1-48b9-b778-0e2be7514655 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.170665] env[63241]: DEBUG nova.compute.provider_tree [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1648.194797] env[63241]: INFO nova.compute.manager [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Took 46.31 seconds to build instance. [ 1648.223322] env[63241]: DEBUG nova.compute.manager [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1648.337162] env[63241]: DEBUG nova.network.neutron [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Updating instance_info_cache with network_info: [{"id": "8754391e-9ab8-421b-995a-d10260d260c6", "address": "fa:16:3e:e1:e9:0a", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8754391e-9a", "ovs_interfaceid": "8754391e-9ab8-421b-995a-d10260d260c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1648.357124] env[63241]: DEBUG nova.network.neutron [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Successfully created port: 7b46f450-f9df-492c-bc52-8760f14afb90 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1648.436614] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820591, 'name': CreateVM_Task, 'duration_secs': 0.778662} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.436702] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1648.438759] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.438759] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.438759] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1648.442027] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d89db45-b7db-438d-827a-8a0644c6936b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.442553] env[63241]: INFO nova.compute.manager [-] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Took 1.20 seconds to deallocate network for instance. [ 1648.445516] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1648.445516] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52aa719f-8d64-4e4c-6412-4ef3b890109c" [ 1648.445516] env[63241]: _type = "Task" [ 1648.445516] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.456540] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52aa719f-8d64-4e4c-6412-4ef3b890109c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.656759] env[63241]: DEBUG nova.network.neutron [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Updated VIF entry in instance network info cache for port e6572a16-56c8-4c47-99ab-22d12f1f2ffc. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1648.657144] env[63241]: DEBUG nova.network.neutron [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Updating instance_info_cache with network_info: [{"id": "e6572a16-56c8-4c47-99ab-22d12f1f2ffc", "address": "fa:16:3e:9a:1b:e1", "network": {"id": "03cda1c6-e368-476f-84bd-f777dcc7a84b", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1396456098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9065e95ad1fe4fc1a9fd588c07f5609c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6572a16-56", "ovs_interfaceid": "e6572a16-56c8-4c47-99ab-22d12f1f2ffc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1648.679051] env[63241]: DEBUG nova.scheduler.client.report [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1648.699500] env[63241]: DEBUG oslo_concurrency.lockutils [None req-08d71ede-4f37-4004-bff5-4353734efac9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.368s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.830250] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.955279] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.962238] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52aa719f-8d64-4e4c-6412-4ef3b890109c, 'name': SearchDatastore_Task, 'duration_secs': 0.015376} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.962638] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.962851] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1648.963135] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.963299] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.963622] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1648.963798] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0903ca6-514d-4cfd-8c66-3eebb43251e5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.978896] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1648.979115] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1648.983020] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c9fd66e-9822-4fe1-a215-4125004ff60c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.985673] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1648.985673] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528f4a77-32c6-f332-63a3-655d5de9bfbc" [ 1648.985673] env[63241]: _type = "Task" [ 1648.985673] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.993869] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528f4a77-32c6-f332-63a3-655d5de9bfbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.162899] env[63241]: DEBUG oslo_concurrency.lockutils [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] Releasing lock "refresh_cache-effc3987-45d0-4305-83a2-0eba47d2c7fd" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.162899] env[63241]: DEBUG nova.compute.manager [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Received event network-vif-deleted-bc8209bb-1cd1-4efc-806c-3fb04ffc73c5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1649.162899] env[63241]: INFO nova.compute.manager [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Neutron deleted interface bc8209bb-1cd1-4efc-806c-3fb04ffc73c5; detaching it from the instance and deleting it from the info cache [ 1649.162899] env[63241]: DEBUG nova.network.neutron [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.183025] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.186267] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.522s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1649.187585] env[63241]: INFO nova.compute.claims [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1649.211104] env[63241]: INFO nova.scheduler.client.report [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Deleted allocations for instance cfdc6b34-6940-414f-b17d-6fe17f92474a [ 1649.237279] env[63241]: DEBUG nova.compute.manager [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1649.275866] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1649.276130] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1649.276295] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1649.276481] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1649.276630] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1649.276779] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1649.276986] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1649.277163] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1649.277336] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1649.277506] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1649.277684] env[63241]: DEBUG nova.virt.hardware [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1649.278854] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5104d30-7d5b-40ec-af55-7c2859111927 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.288529] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55a921e-b448-497e-85b0-d1cba7784360 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.293792] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1649.294632] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4854d38c-4487-420a-8b23-bc115bd5f91f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.311092] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1649.311344] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f77cc68-08cc-4f0c-9a19-d657113cdbb3 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.484024] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1649.484024] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1649.484024] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleting the datastore file [datastore1] 1626092d-78ef-41b5-8b47-fb840d63e4f4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1649.484283] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4a99a43-cf96-4b54-8d50-8be78b436423 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.495328] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528f4a77-32c6-f332-63a3-655d5de9bfbc, 'name': SearchDatastore_Task, 'duration_secs': 0.035763} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.497071] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1649.497071] env[63241]: value = "task-1820593" [ 1649.497071] env[63241]: _type = "Task" [ 1649.497071] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.497694] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4441d3ee-828e-4dcb-8ef0-9517b43ad3eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.504865] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1649.504865] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f061ee-6b97-2f28-8137-aef7ab0b9c83" [ 1649.504865] env[63241]: _type = "Task" [ 1649.504865] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.510389] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820593, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.514836] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f061ee-6b97-2f28-8137-aef7ab0b9c83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.635900] env[63241]: DEBUG nova.compute.manager [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Received event network-vif-unplugged-8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1649.636145] env[63241]: DEBUG oslo_concurrency.lockutils [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] Acquiring lock "1626092d-78ef-41b5-8b47-fb840d63e4f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.636419] env[63241]: DEBUG oslo_concurrency.lockutils [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.636730] env[63241]: DEBUG oslo_concurrency.lockutils [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.636890] env[63241]: DEBUG nova.compute.manager [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] No waiting events found dispatching network-vif-unplugged-8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1649.637213] env[63241]: WARNING nova.compute.manager [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Received unexpected event network-vif-unplugged-8754391e-9ab8-421b-995a-d10260d260c6 for instance with vm_state shelved and task_state shelving_offloading. [ 1649.637956] env[63241]: DEBUG nova.compute.manager [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Received event network-changed-8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1649.637956] env[63241]: DEBUG nova.compute.manager [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Refreshing instance network info cache due to event network-changed-8754391e-9ab8-421b-995a-d10260d260c6. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1649.637956] env[63241]: DEBUG oslo_concurrency.lockutils [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] Acquiring lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.637956] env[63241]: DEBUG oslo_concurrency.lockutils [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] Acquired lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.638189] env[63241]: DEBUG nova.network.neutron [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Refreshing network info cache for port 8754391e-9ab8-421b-995a-d10260d260c6 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1649.666857] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1249b3ea-65cb-45d4-9ef3-5e4cf32ef832 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.677062] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688502bd-4b78-4746-897e-ea2d6647f4e7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.709124] env[63241]: INFO nova.compute.manager [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Rescuing [ 1649.709424] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.709551] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.709717] env[63241]: DEBUG nova.network.neutron [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1649.711209] env[63241]: DEBUG nova.compute.manager [req-73556c73-b41a-4745-a280-bb183f5cb759 req-36a1086c-26ed-4d7b-9bfa-8d3568921263 service nova] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Detach interface failed, port_id=bc8209bb-1cd1-4efc-806c-3fb04ffc73c5, reason: Instance a1a8342a-b00e-42c1-8c01-a95659a78caf could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1649.720331] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a08cca60-45b3-4d20-86db-d5760edb3e90 tempest-ServerAddressesTestJSON-869799430 tempest-ServerAddressesTestJSON-869799430-project-member] Lock "cfdc6b34-6940-414f-b17d-6fe17f92474a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.384s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.016605] env[63241]: DEBUG oslo_vmware.api [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820593, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.419679} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.021772] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1650.021989] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1650.022178] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1650.028095] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f061ee-6b97-2f28-8137-aef7ab0b9c83, 'name': SearchDatastore_Task, 'duration_secs': 0.01914} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.028714] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.028970] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] effc3987-45d0-4305-83a2-0eba47d2c7fd/effc3987-45d0-4305-83a2-0eba47d2c7fd.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1650.029238] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d6969d8-353e-4dba-bc07-319dfccf94b5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.037495] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1650.037495] env[63241]: value = "task-1820594" [ 1650.037495] env[63241]: _type = "Task" [ 1650.037495] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.046822] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820594, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.047694] env[63241]: INFO nova.scheduler.client.report [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted allocations for instance 1626092d-78ef-41b5-8b47-fb840d63e4f4 [ 1650.511332] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b8a64c-4914-4b9d-a271-250191f61511 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.519027] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab35fa66-10e5-4235-92b2-655a2893ccab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.558032] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.558032] env[63241]: DEBUG nova.network.neutron [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Updating instance_info_cache with network_info: [{"id": "e025d87f-adf8-4be9-91fa-85161ae568cf", "address": "fa:16:3e:e3:fb:11", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape025d87f-ad", "ovs_interfaceid": "e025d87f-adf8-4be9-91fa-85161ae568cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.564827] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177b3698-23fd-4a99-b200-4e55d373e7d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.568508] env[63241]: DEBUG nova.compute.manager [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Stashing vm_state: active {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 
1650.572432] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.584595] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820594, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.585950] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fb8091-2251-45c5-952c-c25a2ff11766 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.606629] env[63241]: DEBUG nova.compute.provider_tree [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1650.879326] env[63241]: DEBUG nova.network.neutron [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Successfully updated port: 7b46f450-f9df-492c-bc52-8760f14afb90 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1650.892454] env[63241]: DEBUG nova.network.neutron [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Updated VIF entry in instance network info cache for port 8754391e-9ab8-421b-995a-d10260d260c6. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1650.892828] env[63241]: DEBUG nova.network.neutron [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Updating instance_info_cache with network_info: [{"id": "8754391e-9ab8-421b-995a-d10260d260c6", "address": "fa:16:3e:e1:e9:0a", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": null, "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8754391e-9a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.062572] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820594, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.874608} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.062845] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] effc3987-45d0-4305-83a2-0eba47d2c7fd/effc3987-45d0-4305-83a2-0eba47d2c7fd.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1651.063070] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1651.067285] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0727e438-d3b7-4893-bdd7-d07e8a7b14fc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.074339] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1651.074339] env[63241]: value = "task-1820595" [ 1651.074339] env[63241]: _type = "Task" [ 1651.074339] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.093243] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820595, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.098830] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.111254] env[63241]: DEBUG nova.scheduler.client.report [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1651.118749] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1651.119322] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0fb3dcb-e04e-46c1-a7ba-f0e3b1b8342d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.126724] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1651.126724] env[63241]: value = "task-1820596" [ 1651.126724] env[63241]: _type = "Task" [ 1651.126724] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.137551] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820596, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.385252] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1651.385397] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1651.385548] env[63241]: DEBUG nova.network.neutron [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1651.396098] env[63241]: DEBUG oslo_concurrency.lockutils [req-7c31d6e1-d0a4-48f9-abbe-82c08c74fb55 req-f3d4f642-f381-4279-8e4e-699d4ec98dab service nova] Releasing lock "refresh_cache-1626092d-78ef-41b5-8b47-fb840d63e4f4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.588148] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125956} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.588472] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1651.589624] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5200fe45-d928-4630-8182-dbdaef9aaad6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.613540] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] effc3987-45d0-4305-83a2-0eba47d2c7fd/effc3987-45d0-4305-83a2-0eba47d2c7fd.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1651.614895] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82c898e9-f3f3-4578-902e-9a57bf57de36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.629728] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.630246] env[63241]: DEBUG nova.compute.manager [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1651.633822] env[63241]: DEBUG oslo_concurrency.lockutils [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.788s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.634188] env[63241]: DEBUG nova.objects.instance [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lazy-loading 'resources' on Instance uuid f583adda-976e-4f79-adc7-0b4e1a73ad73 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1651.644960] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820596, 'name': PowerOffVM_Task, 'duration_secs': 0.225371} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.647040] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1651.647625] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1651.647625] env[63241]: value = "task-1820597" [ 1651.647625] env[63241]: _type = "Task" [ 1651.647625] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.648365] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0602cea7-1e34-4763-9e39-99103791e814 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.677863] env[63241]: DEBUG nova.compute.manager [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Received event network-vif-plugged-7b46f450-f9df-492c-bc52-8760f14afb90 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1651.678100] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] Acquiring lock "29b6caa8-a07c-494b-b776-b08affa45c87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.678308] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] Lock "29b6caa8-a07c-494b-b776-b08affa45c87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.678478] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] Lock "29b6caa8-a07c-494b-b776-b08affa45c87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.678649] env[63241]: DEBUG nova.compute.manager [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] No waiting events found dispatching network-vif-plugged-7b46f450-f9df-492c-bc52-8760f14afb90 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1651.678814] env[63241]: WARNING nova.compute.manager [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Received unexpected event network-vif-plugged-7b46f450-f9df-492c-bc52-8760f14afb90 for instance with vm_state building and task_state spawning. 
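[editor's note] The entries above follow oslo.vmware's asynchronous task pattern: each vCenter call that returns a Task object (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOffVM_Task, ReconfigVM_Task) is issued through the API session and then polled until it reaches a terminal state, which is what produces the recurring "Waiting for the task" and "progress is N%" lines. A minimal sketch of that pattern using oslo.vmware directly, under stated assumptions: the vCenter host, credentials, retry/poll values and the power_off helper are placeholders for illustration, not values or code from this deployment, and Nova itself drives these calls through its own session wrapper rather than like this.

# Minimal sketch of the oslo.vmware task pattern visible in these entries:
# invoke an API method that returns a Task, then poll it with wait_for_task().
# Host, credentials and intervals below are placeholders, not values from this
# environment.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org',        # placeholder vCenter host
    'user@vsphere.local',         # placeholder username
    'secret',                     # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

def power_off(vm_ref):
    # Returns once PowerOffVM_Task reaches a terminal state; the
    # "Task: {...} progress is N%" lines above come from this polling loop.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)

wait_for_task() blocks, re-polling the task every task_poll_interval seconds and raising if the task ends in error, which is why successful entries above report a final 'duration_secs' value.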
[ 1651.679030] env[63241]: DEBUG nova.compute.manager [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Received event network-changed-7b46f450-f9df-492c-bc52-8760f14afb90 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1651.679161] env[63241]: DEBUG nova.compute.manager [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Refreshing instance network info cache due to event network-changed-7b46f450-f9df-492c-bc52-8760f14afb90. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1651.679341] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] Acquiring lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1651.680035] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d568bd8-e5dd-4756-a634-63fe71c3c44c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.683196] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820597, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.712757] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1651.713055] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a247ad7c-70b5-407a-a482-f643e43b9855 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.720815] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1651.720815] env[63241]: value = "task-1820598" [ 1651.720815] env[63241]: _type = "Task" [ 1651.720815] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.728547] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820598, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.933587] env[63241]: DEBUG nova.network.neutron [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1652.085495] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "1626092d-78ef-41b5-8b47-fb840d63e4f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.135722] env[63241]: DEBUG nova.compute.utils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1652.137151] env[63241]: DEBUG nova.compute.manager [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1652.137329] env[63241]: DEBUG nova.network.neutron [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1652.145112] env[63241]: DEBUG nova.network.neutron [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Updating instance_info_cache with network_info: [{"id": "7b46f450-f9df-492c-bc52-8760f14afb90", "address": "fa:16:3e:e6:47:b4", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b46f450-f9", "ovs_interfaceid": "7b46f450-f9df-492c-bc52-8760f14afb90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.164988] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820597, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.219121] env[63241]: DEBUG nova.policy [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa971675dc4440df813844c1ed2f2444', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e6e05a4fd294679b512d6a4dcfebd3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1652.236602] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1652.236925] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1652.237318] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.237402] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.237601] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1652.238087] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d94c0757-9ca2-4f7b-80a3-fa5b3514e094 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.249245] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1652.249460] env[63241]: DEBUG 
nova.virt.vmwareapi.vmops [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1652.253560] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd053dfe-a150-4779-846d-07f2b405271d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.260284] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1652.260284] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5266a586-356f-bb3c-b917-eb48b058ff48" [ 1652.260284] env[63241]: _type = "Task" [ 1652.260284] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.269117] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5266a586-356f-bb3c-b917-eb48b058ff48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.475934] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10610e18-0e34-4a28-8ab9-bc8be60fefff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.483653] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94810aab-9617-4e81-a5fb-68da1b43b8a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.517399] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ae6f4e-27dd-4cf8-87af-7045df1575f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.525726] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5372fbe-55c5-4a26-9848-6a8437ead879 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.542709] env[63241]: DEBUG nova.compute.provider_tree [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.556173] env[63241]: DEBUG nova.network.neutron [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Successfully created port: 56f1b482-fc2c-45e5-9aca-99ff209a166e {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1652.640761] env[63241]: DEBUG nova.compute.manager [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 
tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1652.647397] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Releasing lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.647697] env[63241]: DEBUG nova.compute.manager [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Instance network_info: |[{"id": "7b46f450-f9df-492c-bc52-8760f14afb90", "address": "fa:16:3e:e6:47:b4", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b46f450-f9", "ovs_interfaceid": "7b46f450-f9df-492c-bc52-8760f14afb90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1652.647984] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] Acquired lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.648182] env[63241]: DEBUG nova.network.neutron [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Refreshing network info cache for port 7b46f450-f9df-492c-bc52-8760f14afb90 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1652.649264] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:47:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b46f450-f9df-492c-bc52-8760f14afb90', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1652.657428] env[63241]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Creating folder: Project (4bfa018174324b20863367a034d512da). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1652.661174] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-131de9d4-a017-4ae7-9223-24adc7dbd1d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.673538] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820597, 'name': ReconfigVM_Task, 'duration_secs': 0.769744} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.673806] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Reconfigured VM instance instance-00000046 to attach disk [datastore1] effc3987-45d0-4305-83a2-0eba47d2c7fd/effc3987-45d0-4305-83a2-0eba47d2c7fd.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1652.674471] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d545ee5f-ad96-48b8-8b82-81e71d130c7d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.677161] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Created folder: Project (4bfa018174324b20863367a034d512da) in parent group-v376927. [ 1652.677362] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Creating folder: Instances. Parent ref: group-v377123. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1652.677901] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b23c306a-8652-4c0b-bdcc-cf4fb501f70f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.682259] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1652.682259] env[63241]: value = "task-1820600" [ 1652.682259] env[63241]: _type = "Task" [ 1652.682259] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.687374] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Created folder: Instances in parent group-v377123. 
[ 1652.687602] env[63241]: DEBUG oslo.service.loopingcall [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1652.688163] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1652.690669] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7c5adea-a7f1-4cb7-8931-2cc707449462 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.705087] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820600, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.709378] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1652.709378] env[63241]: value = "task-1820602" [ 1652.709378] env[63241]: _type = "Task" [ 1652.709378] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.720454] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820602, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.772414] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5266a586-356f-bb3c-b917-eb48b058ff48, 'name': SearchDatastore_Task, 'duration_secs': 0.013212} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.773510] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efbbf000-19ad-48f4-a74a-2a5effd6ab17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.779271] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1652.779271] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5201c797-19f3-4ca6-766b-9bf1f800d77c" [ 1652.779271] env[63241]: _type = "Task" [ 1652.779271] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.787885] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5201c797-19f3-4ca6-766b-9bf1f800d77c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.047296] env[63241]: DEBUG nova.scheduler.client.report [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1653.099960] env[63241]: DEBUG nova.network.neutron [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Updated VIF entry in instance network info cache for port 7b46f450-f9df-492c-bc52-8760f14afb90. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1653.104230] env[63241]: DEBUG nova.network.neutron [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Updating instance_info_cache with network_info: [{"id": "7b46f450-f9df-492c-bc52-8760f14afb90", "address": "fa:16:3e:e6:47:b4", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b46f450-f9", "ovs_interfaceid": "7b46f450-f9df-492c-bc52-8760f14afb90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.195675] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820600, 'name': Rename_Task, 'duration_secs': 0.279233} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.196060] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1653.196341] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a367f293-ac4d-4035-997a-0af1dc50a2a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.203861] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1653.203861] env[63241]: value = "task-1820603" [ 1653.203861] env[63241]: _type = "Task" [ 1653.203861] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.215521] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820603, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.224328] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820602, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.290702] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5201c797-19f3-4ca6-766b-9bf1f800d77c, 'name': SearchDatastore_Task, 'duration_secs': 0.010751} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.290991] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1653.291321] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] fb5d60fa-fa13-44a1-8291-4645761a0c80/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. 
{{(pid=63241) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1653.291542] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-85b42f4d-2987-49c2-a214-8d4faaecc9d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.298133] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1653.298133] env[63241]: value = "task-1820604" [ 1653.298133] env[63241]: _type = "Task" [ 1653.298133] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.307305] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.551383] env[63241]: DEBUG oslo_concurrency.lockutils [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.917s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.553711] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.977s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.555212] env[63241]: INFO nova.compute.claims [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1653.580557] env[63241]: INFO nova.scheduler.client.report [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted allocations for instance f583adda-976e-4f79-adc7-0b4e1a73ad73 [ 1653.608390] env[63241]: DEBUG oslo_concurrency.lockutils [req-ab4952c6-f870-4cc9-9d2f-e3a4855cca98 req-ff12ff79-8f50-4dc9-a98e-5d0064034e7f service nova] Releasing lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1653.650927] env[63241]: DEBUG nova.compute.manager [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1653.681691] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:28:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='6d3f31be-51b1-4783-a8b9-92005f2fb457',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-13492411',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1653.682036] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1653.682258] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1653.682528] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1653.682731] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1653.682938] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1653.683248] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1653.683484] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1653.683713] env[63241]: DEBUG 
nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1653.683940] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1653.684193] env[63241]: DEBUG nova.virt.hardware [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1653.685561] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237b8202-63c2-4577-9ecd-7575fa367a49 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.696785] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d8fde0-f52c-4f2f-8411-d8ae2101b781 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.724550] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820603, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.729766] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820602, 'name': CreateVM_Task, 'duration_secs': 0.609731} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.729971] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1653.730684] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.730829] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.731147] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1653.731412] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0987208-3e35-4110-963a-a35238f2223e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.736379] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1653.736379] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5206d4d7-1770-a7c5-dbdb-1d4b4412702b" [ 1653.736379] env[63241]: _type = "Task" [ 1653.736379] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.745180] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5206d4d7-1770-a7c5-dbdb-1d4b4412702b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.809527] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820604, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.089977] env[63241]: DEBUG oslo_concurrency.lockutils [None req-15722a5d-6634-42b0-93a5-be9193d14b3b tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "f583adda-976e-4f79-adc7-0b4e1a73ad73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.515s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.220516] env[63241]: DEBUG oslo_vmware.api [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820603, 'name': PowerOnVM_Task, 'duration_secs': 0.631717} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.220870] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1654.221390] env[63241]: INFO nova.compute.manager [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Took 9.36 seconds to spawn the instance on the hypervisor. [ 1654.221612] env[63241]: DEBUG nova.compute.manager [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1654.222522] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ab7e87-1704-4474-9691-049b623501ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.249208] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5206d4d7-1770-a7c5-dbdb-1d4b4412702b, 'name': SearchDatastore_Task, 'duration_secs': 0.124124} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.249654] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.249942] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1654.250620] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.250620] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.250799] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1654.250997] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4b9d49c-c159-4b0f-9c5b-de787cd4a719 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.264139] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1654.264383] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1654.265202] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2cee067-bf32-4bfc-a313-7128defd180a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.271122] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1654.271122] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b83c14-5918-84da-7ebc-a6058fff6f38" [ 1654.271122] env[63241]: _type = "Task" [ 1654.271122] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.280385] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b83c14-5918-84da-7ebc-a6058fff6f38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.310522] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820604, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.661911] env[63241]: DEBUG nova.network.neutron [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Successfully updated port: 56f1b482-fc2c-45e5-9aca-99ff209a166e {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1654.748104] env[63241]: INFO nova.compute.manager [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Took 26.25 seconds to build instance. [ 1654.790286] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b83c14-5918-84da-7ebc-a6058fff6f38, 'name': SearchDatastore_Task, 'duration_secs': 0.018862} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.794152] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3a96d88-8430-4005-a830-dfa7b9bafb33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.804044] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1654.804044] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52421c6c-846f-05d0-ea3e-549af00e43b3" [ 1654.804044] env[63241]: _type = "Task" [ 1654.804044] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.824374] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52421c6c-846f-05d0-ea3e-549af00e43b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.825852] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820604, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.834031] env[63241]: DEBUG nova.compute.manager [req-b0ab8c6c-b79e-4844-a580-34c56046cba0 req-2df233be-e0fe-4233-8387-941a1a83d7c6 service nova] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Received event network-vif-plugged-56f1b482-fc2c-45e5-9aca-99ff209a166e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1654.834325] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0ab8c6c-b79e-4844-a580-34c56046cba0 req-2df233be-e0fe-4233-8387-941a1a83d7c6 service nova] Acquiring lock "7f1710d0-857d-41fc-8151-8c5e129dda08-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.834480] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0ab8c6c-b79e-4844-a580-34c56046cba0 req-2df233be-e0fe-4233-8387-941a1a83d7c6 service nova] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.834641] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0ab8c6c-b79e-4844-a580-34c56046cba0 req-2df233be-e0fe-4233-8387-941a1a83d7c6 service nova] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.834801] env[63241]: DEBUG nova.compute.manager [req-b0ab8c6c-b79e-4844-a580-34c56046cba0 req-2df233be-e0fe-4233-8387-941a1a83d7c6 service nova] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] No waiting events found dispatching network-vif-plugged-56f1b482-fc2c-45e5-9aca-99ff209a166e {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1654.835235] env[63241]: WARNING nova.compute.manager [req-b0ab8c6c-b79e-4844-a580-34c56046cba0 req-2df233be-e0fe-4233-8387-941a1a83d7c6 service nova] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Received unexpected event network-vif-plugged-56f1b482-fc2c-45e5-9aca-99ff209a166e for instance with vm_state building and task_state spawning. 
[ 1654.966196] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc6807b-fc1d-44ea-89a9-918629ecb9ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.976307] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedcf832-0885-4c9e-8108-17b2fd9c2f62 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.012619] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00606a8c-efc9-431f-bcd8-4d1a0f318ccd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.021180] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eecd769-319f-4c8d-ad99-b1a4f6b322e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.039125] env[63241]: DEBUG nova.compute.provider_tree [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1655.162055] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.162299] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1655.162485] env[63241]: DEBUG nova.network.neutron [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1655.251377] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51261cf8-dadf-43c0-ba5d-b029d2970440 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "effc3987-45d0-4305-83a2-0eba47d2c7fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.775s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.313674] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820604, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.806583} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.314424] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] fb5d60fa-fa13-44a1-8291-4645761a0c80/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. [ 1655.315360] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a16d0a-c7cc-4c34-90ab-d3fd476fbd97 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.322030] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52421c6c-846f-05d0-ea3e-549af00e43b3, 'name': SearchDatastore_Task, 'duration_secs': 0.033299} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.322705] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1655.323019] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 29b6caa8-a07c-494b-b776-b08affa45c87/29b6caa8-a07c-494b-b776-b08affa45c87.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1655.323362] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55058862-33bf-42ae-a0bf-97a03cd8e723 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.349714] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] fb5d60fa-fa13-44a1-8291-4645761a0c80/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1655.351720] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28051e1d-b18e-4c1e-a87b-881d941f318d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.369930] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the 
task: (returnval){ [ 1655.369930] env[63241]: value = "task-1820605" [ 1655.369930] env[63241]: _type = "Task" [ 1655.369930] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.377463] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1655.377463] env[63241]: value = "task-1820606" [ 1655.377463] env[63241]: _type = "Task" [ 1655.377463] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.383766] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820605, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.387566] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820606, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.543116] env[63241]: DEBUG nova.scheduler.client.report [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1655.550683] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.551083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.727105] env[63241]: DEBUG nova.network.neutron [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1655.875016] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "7158c64a-5036-419b-b110-7e22c12bf3dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.875274] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "7158c64a-5036-419b-b110-7e22c12bf3dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.875477] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "7158c64a-5036-419b-b110-7e22c12bf3dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.876050] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "7158c64a-5036-419b-b110-7e22c12bf3dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.876050] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "7158c64a-5036-419b-b110-7e22c12bf3dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.884331] env[63241]: INFO nova.compute.manager [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Terminating instance [ 1655.888015] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820605, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.888659] env[63241]: DEBUG nova.compute.manager [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1655.888843] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1655.890197] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5485b3-3857-4d2b-890f-3cc9109b9243 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.896215] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820606, 'name': ReconfigVM_Task, 'duration_secs': 0.327198} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.896951] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Reconfigured VM instance instance-00000043 to attach disk [datastore1] fb5d60fa-fa13-44a1-8291-4645761a0c80/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1655.897648] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bef2513-ee58-48cc-9500-f46607a3d5c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.902134] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1655.902676] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a22b7ebd-ad6e-46d4-95d9-636d249a7d7a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.927362] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88f4c211-80cd-4d96-89c4-813c970d343a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.937796] env[63241]: DEBUG oslo_vmware.api [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1655.937796] env[63241]: value = "task-1820607" [ 1655.937796] env[63241]: _type = "Task" [ 1655.937796] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.942838] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1655.942838] env[63241]: value = "task-1820608" [ 1655.942838] env[63241]: _type = "Task" [ 1655.942838] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.949182] env[63241]: DEBUG oslo_vmware.api [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820607, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.953970] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820608, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.959824] env[63241]: DEBUG nova.network.neutron [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance_info_cache with network_info: [{"id": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "address": "fa:16:3e:72:df:ac", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f1b482-fc", "ovs_interfaceid": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.051807] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.052392] env[63241]: DEBUG nova.compute.manager [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1656.055200] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.126s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.057309] env[63241]: INFO nova.compute.claims [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1656.064360] env[63241]: DEBUG nova.compute.manager [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1656.384050] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820605, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.927666} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.384596] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 29b6caa8-a07c-494b-b776-b08affa45c87/29b6caa8-a07c-494b-b776-b08affa45c87.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1656.384907] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1656.385240] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3260f15-7d5e-431e-8087-428b2281a0e7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.392349] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1656.392349] env[63241]: value = "task-1820609" [ 1656.392349] env[63241]: _type = "Task" [ 1656.392349] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.400258] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820609, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.453964] env[63241]: DEBUG oslo_vmware.api [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820607, 'name': PowerOffVM_Task, 'duration_secs': 0.328213} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.457222] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1656.457402] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1656.458043] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820608, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.458043] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-412559c7-c7cc-432c-957a-d4dd588d081e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.463476] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.463583] env[63241]: DEBUG nova.compute.manager [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Instance network_info: |[{"id": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "address": "fa:16:3e:72:df:ac", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f1b482-fc", "ovs_interfaceid": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1656.463957] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:df:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56f1b482-fc2c-45e5-9aca-99ff209a166e', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1656.471718] env[63241]: DEBUG oslo.service.loopingcall [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1656.471928] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1656.472169] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b56c768-f85d-4c10-b1f8-6559c68fb433 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.491077] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1656.491077] env[63241]: value = "task-1820611" [ 1656.491077] env[63241]: _type = "Task" [ 1656.491077] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.498680] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.573422] env[63241]: DEBUG nova.compute.utils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1656.576970] env[63241]: DEBUG nova.compute.manager [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1656.576970] env[63241]: DEBUG nova.network.neutron [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1656.594958] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.655702] env[63241]: DEBUG nova.policy [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78657a2bc34d4bb9922678ed287530f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18094134f49b4e84b83e97631bc22903', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1656.705553] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "c8f1ce16-70b7-41fd-8516-63198139c1cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.705656] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "c8f1ce16-70b7-41fd-8516-63198139c1cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.865950] env[63241]: DEBUG nova.compute.manager [req-e9f04949-99a6-448c-9745-f39ec2cb3e2a req-bbb598da-6e9d-487a-8f19-cc0123da73f2 service nova] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Received event network-changed-56f1b482-fc2c-45e5-9aca-99ff209a166e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1656.866310] env[63241]: DEBUG nova.compute.manager [req-e9f04949-99a6-448c-9745-f39ec2cb3e2a req-bbb598da-6e9d-487a-8f19-cc0123da73f2 service nova] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Refreshing instance network info cache due to event network-changed-56f1b482-fc2c-45e5-9aca-99ff209a166e. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1656.866680] env[63241]: DEBUG oslo_concurrency.lockutils [req-e9f04949-99a6-448c-9745-f39ec2cb3e2a req-bbb598da-6e9d-487a-8f19-cc0123da73f2 service nova] Acquiring lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.866926] env[63241]: DEBUG oslo_concurrency.lockutils [req-e9f04949-99a6-448c-9745-f39ec2cb3e2a req-bbb598da-6e9d-487a-8f19-cc0123da73f2 service nova] Acquired lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.867254] env[63241]: DEBUG nova.network.neutron [req-e9f04949-99a6-448c-9745-f39ec2cb3e2a req-bbb598da-6e9d-487a-8f19-cc0123da73f2 service nova] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Refreshing network info cache for port 56f1b482-fc2c-45e5-9aca-99ff209a166e {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1656.904316] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820609, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071984} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.904599] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1656.905445] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598305d2-03df-4f82-b059-edc7928690dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.930092] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 29b6caa8-a07c-494b-b776-b08affa45c87/29b6caa8-a07c-494b-b776-b08affa45c87.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1656.930525] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a536ac5d-6de5-4de0-ae4d-6db05419fe90 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.951251] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1656.951251] env[63241]: value = "task-1820612" [ 1656.951251] env[63241]: _type = "Task" [ 1656.951251] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.954606] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820608, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.962154] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820612, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.001415] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.054521] env[63241]: DEBUG nova.network.neutron [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Successfully created port: febed88f-91b2-4546-82de-5dd1a1f73020 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1657.077370] env[63241]: DEBUG nova.compute.manager [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1657.209916] env[63241]: DEBUG nova.compute.manager [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1657.301845] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquiring lock "effc3987-45d0-4305-83a2-0eba47d2c7fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.302143] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "effc3987-45d0-4305-83a2-0eba47d2c7fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.302343] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquiring lock "effc3987-45d0-4305-83a2-0eba47d2c7fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.303015] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "effc3987-45d0-4305-83a2-0eba47d2c7fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.303015] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "effc3987-45d0-4305-83a2-0eba47d2c7fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.305494] env[63241]: INFO nova.compute.manager [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Terminating instance [ 1657.307339] env[63241]: DEBUG nova.compute.manager [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1657.308258] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1657.308683] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7839cbc0-d973-442d-8b67-83997e607ed5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.318922] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1657.319176] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7018324d-a966-401b-80bd-d88b049f3230 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.327320] env[63241]: DEBUG oslo_vmware.api [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1657.327320] env[63241]: value = "task-1820613" [ 1657.327320] env[63241]: _type = "Task" [ 1657.327320] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.335199] env[63241]: DEBUG oslo_vmware.api [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820613, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.428908] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c903d0c6-686f-414f-b2cc-fc760d68d876 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.443711] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab2ba1a-14b7-4ede-9064-b793ce46de85 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.456954] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820608, 'name': ReconfigVM_Task, 'duration_secs': 1.171603} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.491023] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1657.491023] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac01a6a9-0eba-4c2e-8188-4f546b084309 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.491546] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e02d471-e9a4-4ef5-8431-cbb2fbab01dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.501451] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820612, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.505211] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1657.505211] env[63241]: value = "task-1820614" [ 1657.505211] env[63241]: _type = "Task" [ 1657.505211] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.506499] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90bd388-f5f1-4efa-ae4e-c72b8103d6e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.516621] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.529971] env[63241]: DEBUG nova.compute.provider_tree [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1657.534722] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820614, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.729920] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.773388] env[63241]: DEBUG nova.network.neutron [req-e9f04949-99a6-448c-9745-f39ec2cb3e2a req-bbb598da-6e9d-487a-8f19-cc0123da73f2 service nova] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updated VIF entry in instance network info cache for port 56f1b482-fc2c-45e5-9aca-99ff209a166e. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1657.773739] env[63241]: DEBUG nova.network.neutron [req-e9f04949-99a6-448c-9745-f39ec2cb3e2a req-bbb598da-6e9d-487a-8f19-cc0123da73f2 service nova] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance_info_cache with network_info: [{"id": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "address": "fa:16:3e:72:df:ac", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f1b482-fc", "ovs_interfaceid": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.836841] env[63241]: DEBUG oslo_vmware.api [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820613, 'name': PowerOffVM_Task, 'duration_secs': 0.226218} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.837200] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1657.837466] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1657.837741] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54b0d4cf-876a-4350-99e6-db83e9babbcf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.966898] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820612, 'name': ReconfigVM_Task, 'duration_secs': 0.590054} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.967140] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 29b6caa8-a07c-494b-b776-b08affa45c87/29b6caa8-a07c-494b-b776-b08affa45c87.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1657.967773] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b278c1a-f981-445e-b143-f60927994b86 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.973809] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1657.973809] env[63241]: value = "task-1820616" [ 1657.973809] env[63241]: _type = "Task" [ 1657.973809] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.981278] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820616, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.002712] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.014503] env[63241]: DEBUG oslo_vmware.api [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820614, 'name': PowerOnVM_Task, 'duration_secs': 0.505336} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.014765] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1658.017438] env[63241]: DEBUG nova.compute.manager [None req-9538a240-059a-41f5-b036-3edef40699b1 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1658.018189] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52bbb8d-f7e7-42f0-8ae6-cf945430fe03 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.039796] env[63241]: DEBUG nova.scheduler.client.report [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1658.087538] env[63241]: DEBUG nova.compute.manager [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1658.114937] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1658.115301] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1658.115404] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1658.115601] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1658.115766] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1658.115922] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1658.116145] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1658.116312] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1658.116482] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1658.116696] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1658.116819] env[63241]: DEBUG nova.virt.hardware [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1658.117711] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f253af-3485-4a28-87fc-e1c1580e3c79 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.126404] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdbaa0c-886d-4bb9-8dcf-36ade2010995 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.276991] env[63241]: DEBUG oslo_concurrency.lockutils [req-e9f04949-99a6-448c-9745-f39ec2cb3e2a req-bbb598da-6e9d-487a-8f19-cc0123da73f2 service nova] Releasing lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1658.483591] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820616, 'name': Rename_Task, 'duration_secs': 0.171} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.483947] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1658.484111] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d67da09b-6176-4abc-93be-6641c6f40613 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.489580] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1658.489580] env[63241]: value = "task-1820617" [ 1658.489580] env[63241]: _type = "Task" [ 1658.489580] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.496966] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820617, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.504101] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.545288] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.545831] env[63241]: DEBUG nova.compute.manager [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1658.548796] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.386s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.550183] env[63241]: INFO nova.compute.claims [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1658.999958] env[63241]: DEBUG oslo_vmware.api [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820617, 'name': PowerOnVM_Task, 'duration_secs': 0.455465} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.004233] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1659.004233] env[63241]: INFO nova.compute.manager [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Took 9.77 seconds to spawn the instance on the hypervisor. 
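The recurring "Waiting for the task" / "progress is N%" / "completed successfully" records above (PowerOnVM_Task, ReconfigVM_Task, and task-1820611's CreateVM_Task, which is polled repeatedly at 25%) come from oslo.vmware's task polling loop. The snippet below is a minimal, self-contained sketch of that poll-until-terminal-state pattern only; `TaskInfo`, `get_task_info`, and the simulated progress values are illustrative stand-ins and not the actual oslo.vmware API.

```python
import itertools
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str            # vSphere task states: queued, running, success, error
    progress: int = 0
    result: object = None
    error: str = ""


# Stand-in for a vCenter lookup: simulates a task that reports 0%, 25%,
# 50%, 75% and then succeeds. A real driver would read TaskInfo from the
# vCenter PropertyCollector instead.
_progress = itertools.chain([0, 25, 50, 75], itertools.repeat(None))


def get_task_info(task_ref):
    step = next(_progress)
    if step is None:
        return TaskInfo(state="success", progress=100, result="done")
    return TaskInfo(state="running", progress=step)


def wait_for_task(task_ref, poll_interval=0.1):
    """Poll until the task reaches a terminal state, as in the log above."""
    while True:
        info = get_task_info(task_ref)
        if info.state in ("queued", "running"):
            print(f"Task: {task_ref} progress is {info.progress}%")
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            print(f"Task: {task_ref} completed successfully.")
            return info.result
        raise RuntimeError(f"Task {task_ref} failed: {info.error}")


if __name__ == "__main__":
    wait_for_task("task-1820617")
```

The real loop runs inside the periodic caller recorded as wait_for_task/_poll_task in these entries; the sketch only reproduces the observable poll/log/return behaviour.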
[ 1659.004233] env[63241]: DEBUG nova.compute.manager [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1659.004458] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e738d882-2085-4cd8-b129-168d16721eba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.011232] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.054932] env[63241]: DEBUG nova.compute.utils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1659.058434] env[63241]: DEBUG nova.compute.manager [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Not allocating networking since 'none' was specified. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1659.473885] env[63241]: INFO nova.compute.manager [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Unrescuing [ 1659.474113] env[63241]: DEBUG oslo_concurrency.lockutils [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.474392] env[63241]: DEBUG oslo_concurrency.lockutils [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.474694] env[63241]: DEBUG nova.network.neutron [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1659.509826] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.525021] env[63241]: INFO nova.compute.manager [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Took 29.06 seconds to build instance. 
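The lockutils records in this log ('Acquiring lock ...', 'Lock ... acquired ... :: waited N s', 'Lock ... released ... :: held N s', e.g. for "refresh_cache-fb5d60fa-..." and "compute_resources" above) follow an acquire/measure/release pattern. Below is a simplified, process-local sketch of a context manager that reproduces that wait/held bookkeeping with a plain threading.Lock; it illustrates the pattern only and is not the oslo.concurrency implementation.

```python
import threading
import time
from contextlib import contextmanager

# Named locks, analogous to "compute_resources" or "refresh_cache-<uuid>"
# in the log above. Simplified: in-process only, no fair queuing.
_locks: dict[str, threading.Lock] = {}
_locks_guard = threading.Lock()


def _get_lock(name: str) -> threading.Lock:
    with _locks_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name: str, owner: str):
    """Acquire a named lock and report waited/held durations, mirroring
    the 'acquired :: waited' / 'released :: held' lines in the log."""
    lock = _get_lock(name)
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held '
              f'{time.monotonic() - t1:.3f}s')


if __name__ == "__main__":
    with timed_lock("compute_resources", "example.instance_claim"):
        time.sleep(0.2)  # simulated critical section
```

These named locks guard shared state such as the resource tracker and per-instance network info caches, which is why the waited/held figures (12.126s and 14.386s waits on "compute_resources" in this section) grow when several instance claims queue up on the same host.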
[ 1659.559014] env[63241]: DEBUG nova.compute.manager [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1659.852696] env[63241]: DEBUG nova.compute.manager [req-e9d2cb54-098e-4c5e-b208-b68e0796d3fd req-5b836382-7990-4615-8500-99f8cc6b11b9 service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Received event network-changed-7b46f450-f9df-492c-bc52-8760f14afb90 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1659.852830] env[63241]: DEBUG nova.compute.manager [req-e9d2cb54-098e-4c5e-b208-b68e0796d3fd req-5b836382-7990-4615-8500-99f8cc6b11b9 service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Refreshing instance network info cache due to event network-changed-7b46f450-f9df-492c-bc52-8760f14afb90. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1659.853089] env[63241]: DEBUG oslo_concurrency.lockutils [req-e9d2cb54-098e-4c5e-b208-b68e0796d3fd req-5b836382-7990-4615-8500-99f8cc6b11b9 service nova] Acquiring lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.853207] env[63241]: DEBUG oslo_concurrency.lockutils [req-e9d2cb54-098e-4c5e-b208-b68e0796d3fd req-5b836382-7990-4615-8500-99f8cc6b11b9 service nova] Acquired lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.853372] env[63241]: DEBUG nova.network.neutron [req-e9d2cb54-098e-4c5e-b208-b68e0796d3fd req-5b836382-7990-4615-8500-99f8cc6b11b9 service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Refreshing network info cache for port 7b46f450-f9df-492c-bc52-8760f14afb90 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1659.908760] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d9b36a-d88b-4ae5-8a34-9c29f16c119b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.916720] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266009f2-7b93-408b-b60a-d449f7808b74 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.948050] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3e53ba-539f-4e14-8c76-230bf09b9235 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.955521] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79fe9c6-b87a-4ed2-9c8f-3c92d6bd3583 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.969295] env[63241]: DEBUG nova.compute.provider_tree [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1660.009189] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.025255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d47d7d3-4482-4f25-afca-9b1f766ce6fe tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.581s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.285802] env[63241]: DEBUG nova.network.neutron [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Updating instance_info_cache with network_info: [{"id": "e025d87f-adf8-4be9-91fa-85161ae568cf", "address": "fa:16:3e:e3:fb:11", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape025d87f-ad", "ovs_interfaceid": "e025d87f-adf8-4be9-91fa-85161ae568cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.475016] env[63241]: DEBUG nova.scheduler.client.report [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1660.511435] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.575966] env[63241]: DEBUG nova.compute.manager [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1660.602285] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1660.602539] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1660.602702] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1660.602881] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1660.603038] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1660.603190] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1660.603578] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1660.603657] env[63241]: DEBUG nova.virt.hardware 
[None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1660.603788] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1660.603955] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1660.604149] env[63241]: DEBUG nova.virt.hardware [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1660.605040] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd90a23c-00b7-4dc2-880a-bc654f97206f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.613146] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da26e67d-8e67-4908-83e6-6e2fce572d1f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.627272] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1660.633759] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Creating folder: Project (ea4be900793d4564a147155c3c1742d6). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1660.634117] env[63241]: DEBUG nova.network.neutron [req-e9d2cb54-098e-4c5e-b208-b68e0796d3fd req-5b836382-7990-4615-8500-99f8cc6b11b9 service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Updated VIF entry in instance network info cache for port 7b46f450-f9df-492c-bc52-8760f14afb90. 
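
The nova.virt.hardware lines above (request req-b87c8c74, flavor m1.nano with 1 vCPU, all flavor/image limits unset and logged as 0:0:0) end with a single possible topology of 1 socket / 1 core / 1 thread. A simplified, hypothetical sketch of that enumeration follows; the real logic in nova/virt/hardware.py also honours preferences, NUMA constraints and more, so treat this as the basic idea only:

# Hypothetical, simplified version of the topology enumeration logged above:
# factor the vCPU count into sockets * cores * threads under the given maxima.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
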
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1660.634794] env[63241]: DEBUG nova.network.neutron [req-e9d2cb54-098e-4c5e-b208-b68e0796d3fd req-5b836382-7990-4615-8500-99f8cc6b11b9 service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Updating instance_info_cache with network_info: [{"id": "7b46f450-f9df-492c-bc52-8760f14afb90", "address": "fa:16:3e:e6:47:b4", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b46f450-f9", "ovs_interfaceid": "7b46f450-f9df-492c-bc52-8760f14afb90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.635763] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f06a85fa-ee34-45fd-8754-5b5f76a8e631 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.646685] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Created folder: Project (ea4be900793d4564a147155c3c1742d6) in parent group-v376927. [ 1660.646872] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Creating folder: Instances. Parent ref: group-v377127. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1660.647358] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33246e6e-62ab-4583-9c04-1ab7fbbf2569 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.655797] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Created folder: Instances in parent group-v377127. [ 1660.655939] env[63241]: DEBUG oslo.service.loopingcall [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
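
The Neutron cache entries above (e.g. for port 7b46f450-f9df-492c-bc52-8760f14afb90) are lists of VIF dictionaries with nested subnet and IP data. As a hedged illustration of how that structure reads, here is a short sketch that pulls the fixed and floating addresses out of one entry; the dictionary is abridged from the log and the helper is hypothetical, not a Nova API:

# Hypothetical helper for reading a cached network_info VIF like the one above.
# Only the fields used here are kept from the logged entry.
vif = {
    "id": "7b46f450-f9df-492c-bc52-8760f14afb90",
    "address": "fa:16:3e:e6:47:b4",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.11",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.165", "type": "floating"}],
            }],
        }],
    },
}

def addresses(vif):
    """Return (fixed, floating) address lists for one VIF dict."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return fixed, floating

print(addresses(vif))   # (['192.168.128.11'], ['10.180.180.165'])
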
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1660.656137] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1660.656326] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2f7e79d-9397-4714-8437-bf0984e0dd9f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.674031] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1660.674031] env[63241]: value = "task-1820620" [ 1660.674031] env[63241]: _type = "Task" [ 1660.674031] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.681613] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820620, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.789148] env[63241]: DEBUG oslo_concurrency.lockutils [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "refresh_cache-fb5d60fa-fa13-44a1-8291-4645761a0c80" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.789847] env[63241]: DEBUG nova.objects.instance [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lazy-loading 'flavor' on Instance uuid fb5d60fa-fa13-44a1-8291-4645761a0c80 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1660.978333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.978970] env[63241]: DEBUG nova.compute.manager [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Start building networks asynchronously for instance. 
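
The CreateVM_Task entries above show the driver submitting a vCenter task (task-1820620) and then repeatedly logging "progress is N%" until it completes. Below is a minimal sketch of that wait loop; get_task_info is a stand-in for whatever returns vCenter TaskInfo for a task reference, while oslo.vmware's VMwareAPISession.wait_for_task() wraps the same pattern for real:

import time

# Minimal sketch of the polling loop behind the "progress is N%" lines above.
def wait_for_task(task, get_task_info, interval=0.5):
    while True:
        info = get_task_info(task)
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # 'queued' or 'running': report progress and poll again
        print("Task %s progress is %s%%" % (task, info.get('progress', 0)))
        time.sleep(interval)

# Example with a fake task that finishes on the third poll:
states = iter([{'state': 'running', 'progress': 25},
               {'state': 'running', 'progress': 99},
               {'state': 'success', 'result': 'vm-123'}])
print(wait_for_task('task-1820620', lambda _t: next(states), interval=0))
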
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1660.981536] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.784s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.981761] env[63241]: DEBUG nova.objects.instance [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lazy-loading 'resources' on Instance uuid 20c7a1a1-4396-414f-a52c-06551722b6eb {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1661.011855] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.138491] env[63241]: DEBUG oslo_concurrency.lockutils [req-e9d2cb54-098e-4c5e-b208-b68e0796d3fd req-5b836382-7990-4615-8500-99f8cc6b11b9 service nova] Releasing lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.182834] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820620, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.296024] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7877f360-ae7b-40e6-ba39-3e83dacfd9b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.317119] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1661.317453] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa0efa96-1911-457f-a487-339e1d483277 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.324233] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1661.324233] env[63241]: value = "task-1820621" [ 1661.324233] env[63241]: _type = "Task" [ 1661.324233] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.332915] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820621, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.486197] env[63241]: DEBUG nova.compute.utils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1661.492113] env[63241]: DEBUG nova.compute.manager [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Not allocating networking since 'none' was specified. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1661.516711] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.685431] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820620, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.782013] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dd4eaf-2760-4162-a40a-0450be941332 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.789662] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da6fc94-d4ac-4fcb-b91e-d4e88fa3f209 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.820937] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86dbdc93-3d4a-474d-83f6-5aa67652b9a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.831167] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ea6ba5-af71-4d2f-8618-ec42199c8d17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.837667] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820621, 'name': PowerOffVM_Task, 'duration_secs': 0.189208} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.838095] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1661.843473] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1661.850932] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbbd2c5f-520c-436a-88c4-8976901b9f67 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.863533] env[63241]: DEBUG nova.compute.provider_tree [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1661.869430] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1661.869430] env[63241]: value = "task-1820622" [ 1661.869430] env[63241]: _type = "Task" [ 1661.869430] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.878077] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820622, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.993292] env[63241]: DEBUG nova.compute.manager [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1662.013184] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.191057] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820620, 'name': CreateVM_Task, 'duration_secs': 1.087493} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.191378] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1662.192018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.192268] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.192737] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1662.193102] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca90b63b-483e-47fd-af0c-e13fd81ec38e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.198702] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1662.198702] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b67611-e8c2-f597-1fa6-c979befca2ee" [ 1662.198702] env[63241]: _type = "Task" [ 1662.198702] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.208934] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b67611-e8c2-f597-1fa6-c979befca2ee, 'name': SearchDatastore_Task} progress is 0%. 
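
The lockutils lines in this section report how long each caller waited for and then held a named lock: the "compute_resources" lock used by the resource tracker, and the per-image "[datastore1] devstack-image-cache_base/e128f8d9-…" lock acquired just above. A hedged sketch of the same pattern with oslo.concurrency follows; lockutils.lock() as a context manager and the synchronized decorator are the documented entry points, but treat the exact arguments as assumptions rather than copies of Nova's call sites:

# Sketch of named-lock usage in the style of the log lines above.
from oslo_concurrency import lockutils

def refresh_image_cache(datastore, image_id):
    # Serialize access to one image-cache entry, like the
    # "[datastore1] devstack-image-cache_base/<image>" lock in the log.
    lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    with lockutils.lock(lock_name):
        # ... check whether the cached VMDK exists and copy it in if missing ...
        pass

@lockutils.synchronized("compute_resources")
def update_usage():
    # Matches the "compute_resources" lock the resource tracker logs above.
    pass
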
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.366851] env[63241]: DEBUG nova.scheduler.client.report [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1662.379716] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820622, 'name': ReconfigVM_Task, 'duration_secs': 0.209508} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.380624] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1662.380848] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1662.381126] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79ae2b7a-fdf4-4fa6-905a-16fad28b9794 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.388312] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1662.388312] env[63241]: value = "task-1820623" [ 1662.388312] env[63241]: _type = "Task" [ 1662.388312] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.396245] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820623, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.513438] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. 
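
Tasks 1820621, 1820622 and 1820623 above form one sequence for instance fb5d60fa-…: power the VM off, run ReconfigVM_Task to detach the rescue disk (disk 2001), then power it back on. A hedged outline of that sequence is sketched below with a hypothetical session object whose call() submits a vSphere method and whose wait() blocks on the returned task; the real driver goes through oslo.vmware and the vm_util/volumeops helpers:

# Hypothetical outline of the power-off -> detach-disk -> power-on sequence
# logged above. session.call / session.wait are stand-ins, not oslo.vmware API.
def detach_disk_and_restart(session, vm_ref, disk_device):
    task = session.call(vm_ref, "PowerOffVM_Task")
    session.wait(task)                                # "Powered off the VM"

    spec = {"deviceChange": [{"operation": "remove",  # ReconfigVM_Task spec
                              "device": disk_device}]}
    task = session.call(vm_ref, "ReconfigVM_Task", spec=spec)
    session.wait(task)                                # "Reconfigured VM ... to detach disk"

    task = session.call(vm_ref, "PowerOnVM_Task")
    session.wait(task)                                # "Powered on the VM"
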
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.710282] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b67611-e8c2-f597-1fa6-c979befca2ee, 'name': SearchDatastore_Task, 'duration_secs': 0.019011} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.710640] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1662.710769] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1662.711034] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.711240] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.711498] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1662.711832] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfba0970-8bab-4060-816b-4a5f88cb52a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.721131] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1662.721131] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1662.721575] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b52fb4a7-3447-4542-89ad-69bd4abf4a4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.726982] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1662.726982] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e08c04-6c22-b64e-77e8-14f44c119567" [ 1662.726982] env[63241]: _type = "Task" [ 1662.726982] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.735806] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e08c04-6c22-b64e-77e8-14f44c119567, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.875583] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.894s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.877799] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.923s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.878051] env[63241]: DEBUG nova.objects.instance [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lazy-loading 'resources' on Instance uuid a1a8342a-b00e-42c1-8c01-a95659a78caf {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1662.894114] env[63241]: INFO nova.scheduler.client.report [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Deleted allocations for instance 20c7a1a1-4396-414f-a52c-06551722b6eb [ 1662.898844] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820623, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.002982] env[63241]: DEBUG nova.compute.manager [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1663.014417] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.028085] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1663.028328] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1663.028486] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1663.028672] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1663.028819] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1663.028966] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1663.029188] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1663.029350] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1663.029521] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1663.029683] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1663.029854] env[63241]: DEBUG nova.virt.hardware [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1663.030661] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deaf21a6-c1c2-4a34-a48c-b13c6febe27e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.037945] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b6b217-b38d-4ed7-99f5-7443b8ed6df0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.051210] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1663.056873] env[63241]: DEBUG oslo.service.loopingcall [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1663.057133] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1663.057348] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b98efb55-a8a7-4dbc-81a1-79c9e347a186 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.073169] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1663.073169] env[63241]: value = "task-1820624" [ 1663.073169] env[63241]: _type = "Task" [ 1663.073169] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.083206] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820624, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.239277] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e08c04-6c22-b64e-77e8-14f44c119567, 'name': SearchDatastore_Task, 'duration_secs': 0.009835} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.240216] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8d1eef1-4b25-491e-906a-8029fa7eaa7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.246136] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1663.246136] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522d0d65-f3b5-1b15-efe2-edd00b4ec628" [ 1663.246136] env[63241]: _type = "Task" [ 1663.246136] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.253678] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522d0d65-f3b5-1b15-efe2-edd00b4ec628, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.399504] env[63241]: DEBUG oslo_vmware.api [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820623, 'name': PowerOnVM_Task, 'duration_secs': 0.746545} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.399504] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1663.399504] env[63241]: DEBUG nova.compute.manager [None req-30abfb54-c522-4c10-9cf6-2dad8663a737 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1663.400154] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a322342-d0a4-4c70-87d8-4aaf3c404006 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.405477] env[63241]: DEBUG oslo_concurrency.lockutils [None req-71154dbf-e0df-4db8-a362-89c63cf8b5e3 tempest-InstanceActionsTestJSON-703161853 tempest-InstanceActionsTestJSON-703161853-project-member] Lock "20c7a1a1-4396-414f-a52c-06551722b6eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.348s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.517282] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.584337] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820624, 'name': CreateVM_Task, 'duration_secs': 0.276125} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.586706] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1663.588064] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.588369] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.588779] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1663.589323] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f04c187-3a28-4b24-9871-329ec1099c7c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.593939] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1663.593939] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ec0663-ff7a-6318-ab85-c9fb0500620b" [ 1663.593939] env[63241]: _type = "Task" [ 1663.593939] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.604881] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ec0663-ff7a-6318-ab85-c9fb0500620b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.667020] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68182910-46da-4c09-b06b-c67ec839ad17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.672077] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c346e636-098f-4ffc-a563-a9ab879d3a84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.704055] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f616de9a-efdf-4fbb-9d61-b08e4e1fae17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.717343] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4d5255-aede-4d30-ba6c-4a57b28cb195 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.730670] env[63241]: DEBUG nova.compute.provider_tree [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1663.744086] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1663.744086] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1663.744086] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleting the datastore file [datastore1] 7158c64a-5036-419b-b110-7e22c12bf3dd {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1663.744086] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b99fdee0-1a8b-4a5c-9bd0-df5922327c55 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.753547] env[63241]: DEBUG oslo_vmware.api [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1663.753547] env[63241]: value = "task-1820625" [ 1663.753547] env[63241]: _type = "Task" [ 1663.753547] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.763673] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522d0d65-f3b5-1b15-efe2-edd00b4ec628, 'name': SearchDatastore_Task, 'duration_secs': 0.012947} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.765364] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.765766] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 1dc98fbd-a52b-42fa-8d37-d14318dbc941/1dc98fbd-a52b-42fa-8d37-d14318dbc941.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1663.766243] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1663.766546] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1663.766828] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Deleting the datastore file [datastore1] effc3987-45d0-4305-83a2-0eba47d2c7fd {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1663.769846] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dec808cd-88ad-468a-a6b6-7bd9ca03fe41 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.772696] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bee5d8d8-3061-4079-a306-55f670be02e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.774609] env[63241]: DEBUG oslo_vmware.api [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820625, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.779311] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1663.779311] env[63241]: value = "task-1820626" [ 1663.779311] env[63241]: _type = "Task" [ 1663.779311] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.780703] env[63241]: DEBUG oslo_vmware.api [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for the task: (returnval){ [ 1663.780703] env[63241]: value = "task-1820627" [ 1663.780703] env[63241]: _type = "Task" [ 1663.780703] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.795329] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820626, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.795329] env[63241]: DEBUG oslo_vmware.api [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820627, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.016802] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.105907] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ec0663-ff7a-6318-ab85-c9fb0500620b, 'name': SearchDatastore_Task, 'duration_secs': 0.010263} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.106337] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.106630] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1664.106929] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1664.107102] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1664.107338] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1664.107679] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55cd52dd-c031-42a9-ad96-3f91964dfcf4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.119370] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1664.119588] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1664.121035] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d78e5314-b136-480b-a6f6-08e74ab19f48 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.127186] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1664.127186] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bc95f9-e851-4523-5e06-a6c878e38655" [ 1664.127186] env[63241]: _type = "Task" [ 1664.127186] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.135773] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bc95f9-e851-4523-5e06-a6c878e38655, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.234816] env[63241]: DEBUG nova.scheduler.client.report [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1664.264409] env[63241]: DEBUG oslo_vmware.api [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153562} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.264621] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1664.264800] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1664.264977] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1664.265173] env[63241]: INFO nova.compute.manager [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Took 8.38 seconds to destroy the instance on the hypervisor. [ 1664.267509] env[63241]: DEBUG oslo.service.loopingcall [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1664.267509] env[63241]: DEBUG nova.compute.manager [-] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1664.267509] env[63241]: DEBUG nova.network.neutron [-] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1664.291338] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458081} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.292098] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 1dc98fbd-a52b-42fa-8d37-d14318dbc941/1dc98fbd-a52b-42fa-8d37-d14318dbc941.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1664.292319] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1664.292599] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c891abdc-6650-4e5d-95d3-b5cf3c16600b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.297595] env[63241]: DEBUG oslo_vmware.api [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Task: {'id': task-1820627, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149986} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.298282] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1664.298489] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1664.299319] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1664.299534] env[63241]: INFO nova.compute.manager [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Took 6.99 seconds to destroy the instance on the hypervisor. [ 1664.300155] env[63241]: DEBUG oslo.service.loopingcall [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1664.300363] env[63241]: DEBUG nova.compute.manager [-] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1664.300460] env[63241]: DEBUG nova.network.neutron [-] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1664.303469] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1664.303469] env[63241]: value = "task-1820628" [ 1664.303469] env[63241]: _type = "Task" [ 1664.303469] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.319224] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820628, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.521602] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820611, 'name': CreateVM_Task, 'duration_secs': 7.730163} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.521779] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1664.525192] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1664.525192] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1664.525192] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1664.525192] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4314cd6-c282-43fb-b3b8-4aa96174aa23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.533097] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1664.533097] 
env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d0dc6b-c626-a0c3-ca19-77bcaa9e4418" [ 1664.533097] env[63241]: _type = "Task" [ 1664.533097] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.542681] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d0dc6b-c626-a0c3-ca19-77bcaa9e4418, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.572326] env[63241]: DEBUG nova.compute.manager [req-b47f0612-8781-4cd0-9bcd-53b7403c6850 req-801a5bd0-9ee7-4bde-be0e-4c1af56cef85 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Received event network-vif-deleted-e6572a16-56c8-4c47-99ab-22d12f1f2ffc {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1664.572595] env[63241]: INFO nova.compute.manager [req-b47f0612-8781-4cd0-9bcd-53b7403c6850 req-801a5bd0-9ee7-4bde-be0e-4c1af56cef85 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Neutron deleted interface e6572a16-56c8-4c47-99ab-22d12f1f2ffc; detaching it from the instance and deleting it from the info cache [ 1664.572829] env[63241]: DEBUG nova.network.neutron [req-b47f0612-8781-4cd0-9bcd-53b7403c6850 req-801a5bd0-9ee7-4bde-be0e-4c1af56cef85 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.644446] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bc95f9-e851-4523-5e06-a6c878e38655, 'name': SearchDatastore_Task, 'duration_secs': 0.048533} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.645513] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4480471-f6e4-45ae-95e5-601cb4bb8311 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.651295] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1664.651295] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521f835c-0b03-fce5-849c-0267f70915bb" [ 1664.651295] env[63241]: _type = "Task" [ 1664.651295] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.660289] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521f835c-0b03-fce5-849c-0267f70915bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.742227] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.864s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.744474] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.190s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.744853] env[63241]: DEBUG nova.objects.instance [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'resources' on Instance uuid 1626092d-78ef-41b5-8b47-fb840d63e4f4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1664.769010] env[63241]: INFO nova.scheduler.client.report [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Deleted allocations for instance a1a8342a-b00e-42c1-8c01-a95659a78caf [ 1664.815294] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820628, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059884} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.818267] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1664.819129] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8ec5ae-8427-47cb-81b4-ec59fab88fee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.848571] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 1dc98fbd-a52b-42fa-8d37-d14318dbc941/1dc98fbd-a52b-42fa-8d37-d14318dbc941.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1664.851326] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f24bce7-b200-466f-926a-d4925cc7a779 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.870196] env[63241]: DEBUG nova.network.neutron [-] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.877460] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1664.877460] env[63241]: value = "task-1820629" [ 1664.877460] env[63241]: _type = "Task" [ 1664.877460] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.887077] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820629, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.901594] env[63241]: DEBUG nova.compute.manager [req-0da4dfd2-48dc-41e3-9ae5-0ab9fdbf6073 req-c4360c49-bcd2-4bc6-a4ab-b939674a970e service nova] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Received event network-vif-deleted-fbb105cc-6670-49e6-8d68-5e7a6db44e8d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1665.047104] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d0dc6b-c626-a0c3-ca19-77bcaa9e4418, 'name': SearchDatastore_Task, 'duration_secs': 0.009522} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.047104] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.047104] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1665.047104] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.047302] env[63241]: DEBUG nova.network.neutron [-] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.077790] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-03191666-c055-4f8f-87a0-8f7b17e833df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.088158] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f833e90-5a4a-40c5-a63f-3d0aaf1f6709 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.106500] env[63241]: DEBUG nova.network.neutron [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Successfully updated port: febed88f-91b2-4546-82de-5dd1a1f73020 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1665.123271] env[63241]: DEBUG nova.compute.manager [req-b47f0612-8781-4cd0-9bcd-53b7403c6850 req-801a5bd0-9ee7-4bde-be0e-4c1af56cef85 service nova] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Detach interface failed, port_id=e6572a16-56c8-4c47-99ab-22d12f1f2ffc, reason: Instance effc3987-45d0-4305-83a2-0eba47d2c7fd could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1665.163354] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521f835c-0b03-fce5-849c-0267f70915bb, 'name': SearchDatastore_Task, 'duration_secs': 0.010577} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.163354] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.163354] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096/4a57d04b-72a0-4db3-8119-994b67e4b096.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1665.163649] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.163798] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1665.164056] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b973ce8b-c665-4653-90f1-acdd0652a031 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.166432] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-345ffb01-c666-40a0-905a-c6ca15c9209d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.173580] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1665.173580] env[63241]: value = "task-1820630" [ 1665.173580] env[63241]: _type = "Task" [ 1665.173580] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.178535] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1665.178792] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1665.179932] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-833b67d5-984a-4486-a5f6-5608c3e839a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.186682] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820630, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.189858] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1665.189858] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5272ea90-10e9-57f9-9b69-455c046e85d8" [ 1665.189858] env[63241]: _type = "Task" [ 1665.189858] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.198038] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5272ea90-10e9-57f9-9b69-455c046e85d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.251079] env[63241]: DEBUG nova.objects.instance [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'numa_topology' on Instance uuid 1626092d-78ef-41b5-8b47-fb840d63e4f4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1665.279635] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fc71084d-b72d-4452-b204-4ff8bbac48f8 tempest-ServersAdminTestJSON-675103814 tempest-ServersAdminTestJSON-675103814-project-member] Lock "a1a8342a-b00e-42c1-8c01-a95659a78caf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.282s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.373495] env[63241]: INFO nova.compute.manager [-] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Took 1.11 seconds to deallocate network for instance. [ 1665.394445] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820629, 'name': ReconfigVM_Task, 'duration_secs': 0.26193} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.394713] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 1dc98fbd-a52b-42fa-8d37-d14318dbc941/1dc98fbd-a52b-42fa-8d37-d14318dbc941.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1665.395504] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f67e4c9-c841-42b3-875b-9da619f75fcc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.405055] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1665.405055] env[63241]: value = "task-1820631" [ 1665.405055] env[63241]: _type = "Task" [ 1665.405055] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.423023] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820631, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.550218] env[63241]: INFO nova.compute.manager [-] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Took 1.25 seconds to deallocate network for instance. [ 1665.609303] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "refresh_cache-f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.610142] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "refresh_cache-f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.610438] env[63241]: DEBUG nova.network.neutron [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1665.684833] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820630, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482231} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.685449] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096/4a57d04b-72a0-4db3-8119-994b67e4b096.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1665.685683] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1665.685978] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8bc682a-674a-4627-a8e5-0dab8cfb15bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.695436] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1665.695436] env[63241]: value = "task-1820632" [ 1665.695436] env[63241]: _type = "Task" [ 1665.695436] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.702806] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5272ea90-10e9-57f9-9b69-455c046e85d8, 'name': SearchDatastore_Task, 'duration_secs': 0.00938} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.703953] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb938a78-dac8-4ca1-a275-ada54a19f8f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.708962] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820632, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.712085] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1665.712085] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d31dc5-35dc-a09b-373e-90cf6779e92f" [ 1665.712085] env[63241]: _type = "Task" [ 1665.712085] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.722478] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d31dc5-35dc-a09b-373e-90cf6779e92f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.753834] env[63241]: DEBUG nova.objects.base [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Object Instance<1626092d-78ef-41b5-8b47-fb840d63e4f4> lazy-loaded attributes: resources,numa_topology {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1665.887135] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.916925] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820631, 'name': Rename_Task, 'duration_secs': 0.318009} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.918122] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1665.918122] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00026c3c-b9d5-4472-a555-53da271088f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.924078] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1665.924078] env[63241]: value = "task-1820633" [ 1665.924078] env[63241]: _type = "Task" [ 1665.924078] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.938563] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820633, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.061135] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.076973] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db83333b-cf2a-4fc4-8436-eafa16878bbe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.085325] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae89dda-d334-42e0-bf11-a5e4de749dfa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.118622] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb9fa2d-2c2d-4606-911e-4f0b2be175bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.127152] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289b2978-316e-4734-b8f7-ee0f697ebc5b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.141799] env[63241]: DEBUG nova.compute.provider_tree [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1666.208228] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070985} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.208876] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1666.210688] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8d1e17-ea3e-43bb-bac8-eddc226b6111 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.251899] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096/4a57d04b-72a0-4db3-8119-994b67e4b096.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1666.256035] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d219086-35a8-4fc0-a3c4-693f8f0a5b72 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.270441] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d31dc5-35dc-a09b-373e-90cf6779e92f, 'name': SearchDatastore_Task, 'duration_secs': 0.012122} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.270786] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.271152] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 7f1710d0-857d-41fc-8151-8c5e129dda08/7f1710d0-857d-41fc-8151-8c5e129dda08.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1666.271835] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7eecd7b-1c9e-45c9-9441-677c5ff16a72 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.277239] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1666.277239] env[63241]: value = "task-1820634" [ 1666.277239] env[63241]: _type = "Task" [ 1666.277239] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.281382] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1666.281382] env[63241]: value = "task-1820635" [ 1666.281382] env[63241]: _type = "Task" [ 1666.281382] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.288434] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820634, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.293218] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820635, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.351342] env[63241]: DEBUG nova.network.neutron [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1666.352245] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1666.352601] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1666.434559] env[63241]: DEBUG oslo_vmware.api [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820633, 'name': PowerOnVM_Task, 'duration_secs': 0.508894} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.434937] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1666.435206] env[63241]: INFO nova.compute.manager [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Took 5.86 seconds to spawn the instance on the hypervisor. [ 1666.435439] env[63241]: DEBUG nova.compute.manager [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1666.436333] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698670ad-9ecf-469e-b38d-baf5a243446f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.632905] env[63241]: DEBUG nova.network.neutron [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Updating instance_info_cache with network_info: [{"id": "febed88f-91b2-4546-82de-5dd1a1f73020", "address": "fa:16:3e:f5:ad:58", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapfebed88f-91", "ovs_interfaceid": "febed88f-91b2-4546-82de-5dd1a1f73020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.644743] env[63241]: DEBUG nova.scheduler.client.report [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1666.796828] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820634, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.800524] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820635, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.862449] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1666.862597] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1666.962211] env[63241]: DEBUG nova.compute.manager [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Received event network-vif-plugged-febed88f-91b2-4546-82de-5dd1a1f73020 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1666.962546] env[63241]: DEBUG oslo_concurrency.lockutils [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] Acquiring lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.962912] env[63241]: DEBUG oslo_concurrency.lockutils [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] Lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.963224] env[63241]: DEBUG 
oslo_concurrency.lockutils [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] Lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.963506] env[63241]: DEBUG nova.compute.manager [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] No waiting events found dispatching network-vif-plugged-febed88f-91b2-4546-82de-5dd1a1f73020 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1666.963768] env[63241]: WARNING nova.compute.manager [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Received unexpected event network-vif-plugged-febed88f-91b2-4546-82de-5dd1a1f73020 for instance with vm_state building and task_state spawning. [ 1666.964236] env[63241]: DEBUG nova.compute.manager [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Received event network-changed-febed88f-91b2-4546-82de-5dd1a1f73020 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1666.964531] env[63241]: DEBUG nova.compute.manager [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Refreshing instance network info cache due to event network-changed-febed88f-91b2-4546-82de-5dd1a1f73020. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1666.964885] env[63241]: DEBUG oslo_concurrency.lockutils [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] Acquiring lock "refresh_cache-f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.965651] env[63241]: INFO nova.compute.manager [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Took 23.05 seconds to build instance. 
[ 1667.135777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "refresh_cache-f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.136265] env[63241]: DEBUG nova.compute.manager [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Instance network_info: |[{"id": "febed88f-91b2-4546-82de-5dd1a1f73020", "address": "fa:16:3e:f5:ad:58", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfebed88f-91", "ovs_interfaceid": "febed88f-91b2-4546-82de-5dd1a1f73020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1667.137384] env[63241]: DEBUG oslo_concurrency.lockutils [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] Acquired lock "refresh_cache-f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.137617] env[63241]: DEBUG nova.network.neutron [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Refreshing network info cache for port febed88f-91b2-4546-82de-5dd1a1f73020 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1667.139973] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:ad:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'febed88f-91b2-4546-82de-5dd1a1f73020', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1667.152209] env[63241]: DEBUG oslo.service.loopingcall [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1667.153174] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.409s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.157649] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1667.157778] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 16.059s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.159266] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23f0b920-112b-4174-a9dc-5641a5344939 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.184891] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1667.184891] env[63241]: value = "task-1820636" [ 1667.184891] env[63241]: _type = "Task" [ 1667.184891] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.195198] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820636, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.295834] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820635, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.903081} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.299648] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 7f1710d0-857d-41fc-8151-8c5e129dda08/7f1710d0-857d-41fc-8151-8c5e129dda08.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1667.299648] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1667.299648] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820634, 'name': ReconfigVM_Task, 'duration_secs': 0.846175} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.299809] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81a9c07d-1465-4540-834e-830051e81ef3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.302437] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096/4a57d04b-72a0-4db3-8119-994b67e4b096.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1667.303122] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2905b4c-f0ae-402a-a7cd-d21908a6f8eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.309674] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1667.309674] env[63241]: value = "task-1820638" [ 1667.309674] env[63241]: _type = "Task" [ 1667.309674] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.311132] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1667.311132] env[63241]: value = "task-1820637" [ 1667.311132] env[63241]: _type = "Task" [ 1667.311132] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.326497] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820637, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.326767] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820638, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.448323] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "1e172f73-972e-4401-b358-512f7e03b27f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.450073] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.469156] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b87c8c74-dfcd-416b-9b53-e27ea781f349 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "1dc98fbd-a52b-42fa-8d37-d14318dbc941" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.568s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.666335] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b46500fc-f672-4348-8f19-8fc796335594 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 35.980s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.666580] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 15.581s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.668932] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "1626092d-78ef-41b5-8b47-fb840d63e4f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.668932] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.668932] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.673672] env[63241]: INFO nova.compute.manager [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Terminating instance [ 1667.677234] env[63241]: DEBUG nova.compute.manager [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1667.677658] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1667.678023] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-50a3d8ef-4477-4f5d-81a0-b1714251d425 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.685313] env[63241]: INFO nova.compute.claims [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1667.698847] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c83cd1-d3d8-4732-add3-aa39b255584c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.721873] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820636, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.746391] env[63241]: WARNING nova.virt.vmwareapi.vmops [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1626092d-78ef-41b5-8b47-fb840d63e4f4 could not be found. 
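The "Waiting for function ... to return" entries (oslo_service/loopingcall.py:435), seen above for nova.virt.vmwareapi.vm_util.create_vm and below for _deallocate_network_with_retries, come from oslo.service's looping-call helpers. The following is a self-contained sketch of that generic polling pattern using FixedIntervalLoopingCall; the callback and state dict are invented for illustration and are not the exact wrapper Nova uses here.

    from oslo_service import loopingcall

    def _poll(state):
        # Called every interval; raising LoopingCallDone stops the loop
        # and hands its retvalue back to the waiter.
        state['ticks'] += 1
        if state['ticks'] >= 3:
            raise loopingcall.LoopingCallDone(retvalue=state['ticks'])

    state = {'ticks': 0}
    timer = loopingcall.FixedIntervalLoopingCall(_poll, state)
    result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone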
[ 1667.746679] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1667.747182] env[63241]: INFO nova.compute.manager [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Took 0.07 seconds to destroy the instance on the hypervisor. [ 1667.747599] env[63241]: DEBUG oslo.service.loopingcall [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1667.747875] env[63241]: DEBUG nova.compute.manager [-] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1667.748371] env[63241]: DEBUG nova.network.neutron [-] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1667.826779] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820637, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094501} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.827114] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820638, 'name': Rename_Task, 'duration_secs': 0.186976} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.827253] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1667.827530] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1667.828241] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f624fd-1dcd-4aab-957f-a71c011cd02c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.830683] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0f30c52-0a85-40d1-b99d-53a88e2b5950 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.853262] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 7f1710d0-857d-41fc-8151-8c5e129dda08/7f1710d0-857d-41fc-8151-8c5e129dda08.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1667.855467] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-793480f4-5e6a-42bd-bd12-02a6630a53a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.872949] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1667.872949] env[63241]: value = "task-1820639" [ 1667.872949] env[63241]: _type = "Task" [ 1667.872949] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.884736] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1667.884736] env[63241]: value = "task-1820640" [ 1667.884736] env[63241]: _type = "Task" [ 1667.884736] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.890868] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820639, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.895829] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820640, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.952545] env[63241]: DEBUG nova.compute.manager [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1667.983549] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.983825] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.168775] env[63241]: DEBUG nova.network.neutron [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Updated VIF entry in instance network info cache for port febed88f-91b2-4546-82de-5dd1a1f73020. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1668.169062] env[63241]: DEBUG nova.network.neutron [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Updating instance_info_cache with network_info: [{"id": "febed88f-91b2-4546-82de-5dd1a1f73020", "address": "fa:16:3e:f5:ad:58", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfebed88f-91", "ovs_interfaceid": "febed88f-91b2-4546-82de-5dd1a1f73020", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.193269] env[63241]: INFO nova.compute.resource_tracker [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating resource usage from migration 5eaba4ce-595b-4b48-b05e-b26491a5c40b [ 1668.208289] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820636, 'name': CreateVM_Task, 'duration_secs': 0.575897} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.208289] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1668.208821] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.209018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.209372] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1668.209688] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c8f85df-a44a-4529-a214-8731980c9f42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.216223] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1668.216223] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5213bae8-b4a8-ec55-4e28-de388c4b2fda" [ 1668.216223] env[63241]: _type = "Task" [ 1668.216223] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.225274] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5213bae8-b4a8-ec55-4e28-de388c4b2fda, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.386066] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "73ea6bff-60da-4691-a569-f4e9ae92f701" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.386321] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "73ea6bff-60da-4691-a569-f4e9ae92f701" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.401949] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820639, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.409179] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820640, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.478050] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.488245] env[63241]: DEBUG nova.compute.manager [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1668.606753] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f378c21f-a15a-43df-a6d9-3ec1428a4bea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.614790] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2353ee-dd78-42d8-96ff-618d7d93ac8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.644935] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ce2a28-36ca-45a0-bfb6-5e1a84a18d85 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.653057] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a8f7cb-8ef1-4ee6-8a81-7a59a3ece0ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.667679] env[63241]: DEBUG nova.compute.provider_tree [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1668.673021] env[63241]: DEBUG oslo_concurrency.lockutils [req-53dff199-2512-4d5e-8090-730bed6b3989 req-dd4a7f78-e970-4b6f-b98c-aeadaa76a437 service nova] Releasing lock "refresh_cache-f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.727599] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5213bae8-b4a8-ec55-4e28-de388c4b2fda, 'name': SearchDatastore_Task, 'duration_secs': 0.020983} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.727920] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.728178] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1668.728414] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.728555] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.728727] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1668.728989] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9aa9553-c164-498e-9036-b7bc54e7ce6b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.730908] env[63241]: DEBUG nova.network.neutron [-] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.765149] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1668.765328] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1668.766223] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8697eedb-d1ce-42e0-b409-d726412c0a4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.772780] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1668.772780] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52dc7076-0bb7-8053-f68e-098b205fa212" [ 1668.772780] env[63241]: _type = "Task" [ 1668.772780] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.782420] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dc7076-0bb7-8053-f68e-098b205fa212, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.887047] env[63241]: DEBUG oslo_vmware.api [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820639, 'name': PowerOnVM_Task, 'duration_secs': 0.984454} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.887360] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1668.887568] env[63241]: INFO nova.compute.manager [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Took 5.88 seconds to spawn the instance on the hypervisor. [ 1668.887701] env[63241]: DEBUG nova.compute.manager [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1668.888507] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e203e5-bb09-4b2c-867b-610f293fdc9a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.891329] env[63241]: DEBUG nova.compute.manager [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1668.906502] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820640, 'name': ReconfigVM_Task, 'duration_secs': 0.912748} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.906843] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 7f1710d0-857d-41fc-8151-8c5e129dda08/7f1710d0-857d-41fc-8151-8c5e129dda08.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1668.907497] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cec46c9f-cbd7-4a04-9211-859e55833da6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.914551] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1668.914551] env[63241]: value = "task-1820641" [ 1668.914551] env[63241]: _type = "Task" [ 1668.914551] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.923653] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820641, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.927548] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.927710] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.013888] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.171517] env[63241]: DEBUG nova.scheduler.client.report [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1669.233130] env[63241]: INFO nova.compute.manager [-] [instance: 1626092d-78ef-41b5-8b47-fb840d63e4f4] Took 1.48 seconds to deallocate network for instance. [ 1669.285597] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dc7076-0bb7-8053-f68e-098b205fa212, 'name': SearchDatastore_Task, 'duration_secs': 0.012575} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.287138] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6aa14a31-febe-49f7-a400-6ea46d4a6913 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.293189] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1669.293189] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528b5166-d10e-f184-d6ff-119e3565f7ab" [ 1669.293189] env[63241]: _type = "Task" [ 1669.293189] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.301708] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528b5166-d10e-f184-d6ff-119e3565f7ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.414564] env[63241]: INFO nova.compute.manager [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Took 25.28 seconds to build instance. [ 1669.436049] env[63241]: DEBUG nova.compute.manager [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1669.436993] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820641, 'name': Rename_Task, 'duration_secs': 0.233747} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.438105] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.439493] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1669.440112] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae08ed3f-33ae-4d2b-9241-82bf08efd216 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.448602] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1669.448602] env[63241]: value = "task-1820642" [ 1669.448602] env[63241]: _type = "Task" [ 1669.448602] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.458565] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820642, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.678408] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.519s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.678408] env[63241]: INFO nova.compute.manager [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Migrating [ 1669.684864] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.090s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.686378] env[63241]: INFO nova.compute.claims [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1669.805369] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528b5166-d10e-f184-d6ff-119e3565f7ab, 'name': SearchDatastore_Task, 'duration_secs': 0.02324} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.805799] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.806212] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067/f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1669.806689] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72e4faa6-d10a-4414-b347-54b91751fcae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.814688] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1669.814688] env[63241]: value = "task-1820643" [ 1669.814688] env[63241]: _type = "Task" [ 1669.814688] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.824890] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820643, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.923818] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6c4a69bb-8186-4051-8123-67f55476851a tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "4a57d04b-72a0-4db3-8119-994b67e4b096" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.796s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.954121] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.960855] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820642, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.202416] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.202700] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.202973] env[63241]: DEBUG nova.network.neutron [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1670.259874] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78c10a3e-ed37-40b7-bcb0-4e9fce5bbd37 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "1626092d-78ef-41b5-8b47-fb840d63e4f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.593s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.274344] env[63241]: INFO nova.compute.manager [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Rebuilding instance [ 1670.325732] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820643, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.337983] env[63241]: DEBUG nova.compute.manager [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1670.338931] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ab2135-be99-4443-a4ca-24719225b1c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.460181] env[63241]: DEBUG oslo_vmware.api [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820642, 'name': PowerOnVM_Task, 'duration_secs': 0.912578} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.460914] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1670.460996] env[63241]: INFO nova.compute.manager [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Took 16.81 seconds to spawn the instance on the hypervisor. [ 1670.461132] env[63241]: DEBUG nova.compute.manager [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1670.462030] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a3fa4a-0d79-4821-b3ee-c724fcc9d1e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.830370] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820643, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.66282} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.830802] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067/f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1670.831173] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1670.831515] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae0bcd35-6d31-4fbe-be3e-2398d8654de9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.841107] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1670.841107] env[63241]: value = "task-1820644" [ 1670.841107] env[63241]: _type = "Task" [ 1670.841107] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.847525] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820644, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.851311] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1670.855029] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5de3f029-f590-43d3-8077-5bf3d2887f89 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.861605] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1670.861605] env[63241]: value = "task-1820645" [ 1670.861605] env[63241]: _type = "Task" [ 1670.861605] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.871286] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.981299] env[63241]: INFO nova.compute.manager [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Took 33.34 seconds to build instance. 
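The repeated wait_for_task/_poll_task pairs in this section (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) reflect how oslo.vmware drives vSphere tasks: issue the SOAP call, then poll the returned task object until it reports success or error. A minimal sketch of that pattern follows; it is not Nova's code, and the credentials and vm_ref in the usage comment are placeholders.

    from oslo_vmware import api

    def run_task(session, managed_object, method, *args):
        # invoke_api performs the request (e.g. 'PowerOnVM_Task');
        # wait_for_task polls the returned task and raises on failure.
        task = session.invoke_api(session.vim, method, managed_object, *args)
        return session.wait_for_task(task)

    # Usage against a real vCenter would look like (placeholder values):
    # session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)
    # run_task(session, vm_ref, 'PowerOnVM_Task')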
[ 1671.027262] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f539bc78-7df5-4267-8ea1-456c50258507 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.035644] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a843701c-aa95-41a8-bfac-d50e340e7687 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.069243] env[63241]: DEBUG nova.network.neutron [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance_info_cache with network_info: [{"id": "5546d295-8d78-4143-b874-e6cc21c5945a", "address": "fa:16:3e:8b:83:aa", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5546d295-8d", "ovs_interfaceid": "5546d295-8d78-4143-b874-e6cc21c5945a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.070948] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f93a48-aa11-465f-a805-5ad0097442ce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.078772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdab2078-d294-4dd7-bce8-2b726d8d82b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.094708] env[63241]: DEBUG nova.compute.provider_tree [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1671.349310] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820644, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066886} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.349557] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1671.350447] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e1fdb1-6576-47d5-b06c-c79efc57f0f8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.374509] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067/f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1671.378890] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e1ba48c-fbc2-4357-9a1c-85261ed48876 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.396393] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.396627] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.403670] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820645, 'name': PowerOffVM_Task, 'duration_secs': 0.393713} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.404970] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1671.405211] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1671.405545] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1671.405545] env[63241]: value = "task-1820646" [ 1671.405545] env[63241]: _type = "Task" [ 1671.405545] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.406276] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a061aa38-fa98-40b7-87e3-604ec5c1ba37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.416465] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1671.420530] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9165c517-b682-421e-b659-411ae4cb8cbb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.422024] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820646, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.446886] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1671.447120] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1671.447312] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Deleting the datastore file [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1671.447565] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f66d006-3321-4572-ba60-47ee6bdb6972 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.455169] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1671.455169] env[63241]: value = "task-1820648" [ 1671.455169] env[63241]: _type = "Task" [ 1671.455169] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.463853] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820648, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.483078] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f9feeed3-00d3-4ac1-8248-cac47eec7ac0 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.848s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.574575] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.598393] env[63241]: DEBUG nova.scheduler.client.report [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1671.900106] env[63241]: DEBUG nova.compute.manager [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1671.920343] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820646, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.966990] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292757} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.967411] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1671.967683] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1671.967953] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1672.104671] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.105124] env[63241]: DEBUG nova.compute.manager [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1672.108225] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.378s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.112033] env[63241]: INFO nova.compute.claims [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1672.419989] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820646, 'name': ReconfigVM_Task, 'duration_secs': 0.542463} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.420551] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Reconfigured VM instance instance-00000049 to attach disk [datastore1] f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067/f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1672.420912] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2625e66e-c270-4331-b06c-260effa17319 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.423961] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.425471] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1672.426395] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.426395] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.426395] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.426561] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.426603] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.426783] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.426948] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1672.427130] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.429196] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1672.429196] env[63241]: value = "task-1820649" [ 1672.429196] env[63241]: _type = "Task" [ 1672.429196] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.437082] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820649, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.614271] env[63241]: DEBUG nova.compute.utils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1672.618895] env[63241]: DEBUG nova.compute.manager [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1672.619188] env[63241]: DEBUG nova.network.neutron [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1672.655142] env[63241]: DEBUG nova.compute.manager [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Stashing vm_state: active {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1672.690680] env[63241]: DEBUG nova.policy [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16508acd49bf4efba4f9c509a2dc5fd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d1a62ae45c74a7ba071363005b3a52e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1672.931620] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.940846] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820649, 'name': Rename_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.004593] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1673.004888] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1673.005076] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1673.005291] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1673.005445] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1673.005606] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1673.005849] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1673.006150] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1673.006369] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 
tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1673.006552] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1673.006727] env[63241]: DEBUG nova.virt.hardware [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1673.007625] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa8c959-1d74-496f-8dc7-db2e1128d401 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.016106] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdae78e0-5116-414a-b71f-352435ea628b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.031925] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1673.038110] env[63241]: DEBUG oslo.service.loopingcall [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.038401] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1673.038634] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68d45619-8bfc-41f2-b289-2a1b4a60135b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.056096] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1673.056096] env[63241]: value = "task-1820650" [ 1673.056096] env[63241]: _type = "Task" [ 1673.056096] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.064663] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820650, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.092885] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1801264c-45bd-4fa5-bae4-1a3cf8fac1d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.113220] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance 'cb7eb689-b8f6-479d-aa6b-c27fab16e131' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1673.119865] env[63241]: DEBUG nova.compute.manager [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1673.182506] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.259775] env[63241]: DEBUG nova.network.neutron [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Successfully created port: 24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1673.442274] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820649, 'name': Rename_Task, 'duration_secs': 0.667958} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.442599] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1673.442826] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9dc2626e-6170-4b35-80ae-67da73f4dd35 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.449801] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1673.449801] env[63241]: value = "task-1820651" [ 1673.449801] env[63241]: _type = "Task" [ 1673.449801] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.459278] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.519377] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fcc6b2-7cff-4850-a8b7-cb9f746969eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.527444] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ffdb58-87dc-47a4-b6c7-83bcca3ca089 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.561379] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559e58bf-1e5b-47c2-8a6a-ae3d26f6dc8c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.572234] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf183a4-00fa-45f0-9717-85580d806128 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.575832] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820650, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.585981] env[63241]: DEBUG nova.compute.provider_tree [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1673.619418] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1673.619856] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e37d93ea-db19-49b7-9568-0ed122c23924 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.629775] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1673.629775] env[63241]: value = "task-1820652" [ 1673.629775] env[63241]: _type = "Task" [ 1673.629775] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.640022] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820652, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.962011] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820651, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.068503] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820650, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.089705] env[63241]: DEBUG nova.scheduler.client.report [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1674.135849] env[63241]: DEBUG nova.compute.manager [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1674.144135] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820652, 'name': PowerOffVM_Task, 'duration_secs': 0.204285} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.144488] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1674.144740] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance 'cb7eb689-b8f6-479d-aa6b-c27fab16e131' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1674.163407] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1674.163815] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1674.164019] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1674.164203] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1674.164445] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1674.164741] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1674.165098] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1674.165188] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1674.165420] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1674.165721] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1674.166045] env[63241]: DEBUG nova.virt.hardware [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1674.169070] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82db6f0d-0646-4c14-adf0-8205ad69e31d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.177429] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfc0afd-2b87-46ad-8eb8-1f1cdae86e03 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.461791] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820651, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.569754] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820650, 'name': CreateVM_Task, 'duration_secs': 1.427053} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.569909] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1674.570426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.570527] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.570905] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1674.571190] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84099ab4-cb07-47a7-9571-d901fa59c911 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.576933] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1674.576933] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5275c568-e171-4207-6763-e978f9bfdb48" [ 1674.576933] env[63241]: _type = "Task" [ 1674.576933] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.586204] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5275c568-e171-4207-6763-e978f9bfdb48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.595223] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.487s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.595816] env[63241]: DEBUG nova.compute.manager [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1674.600144] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.712s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.600144] env[63241]: DEBUG nova.objects.instance [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lazy-loading 'resources' on Instance uuid 7158c64a-5036-419b-b110-7e22c12bf3dd {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1674.651556] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1674.651930] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1674.652043] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1674.652447] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1674.652615] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1674.652790] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1674.653067] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 
tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1674.653336] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1674.653741] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1674.654050] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1674.654237] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1674.660180] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa28f8e0-a698-48d3-825f-ef0a335315df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.678292] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1674.678292] env[63241]: value = "task-1820653" [ 1674.678292] env[63241]: _type = "Task" [ 1674.678292] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.688011] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820653, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.966271] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820651, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.087850] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5275c568-e171-4207-6763-e978f9bfdb48, 'name': SearchDatastore_Task, 'duration_secs': 0.017631} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.088279] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.088520] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1675.090582] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.090820] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.091053] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1675.091351] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2f5e45b-4121-4fe9-a976-e0b023b08084 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.104089] env[63241]: DEBUG nova.compute.utils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1675.104089] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1675.104305] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1675.105986] env[63241]: DEBUG nova.compute.manager [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1675.105986] env[63241]: DEBUG nova.network.neutron [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1675.110404] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-949a283b-9c6a-45ef-985a-653c7a65ef23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.118989] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1675.118989] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52753892-1f58-9068-b76b-91bb53ec9d52" [ 1675.118989] env[63241]: _type = "Task" [ 1675.118989] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.139650] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52753892-1f58-9068-b76b-91bb53ec9d52, 'name': SearchDatastore_Task, 'duration_secs': 0.011782} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.140479] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7138f94-091e-4e45-ac6e-51ba92a9b7fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.148701] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1675.148701] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522c4952-2551-ec1a-ddae-9753bc550027" [ 1675.148701] env[63241]: _type = "Task" [ 1675.148701] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.159225] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522c4952-2551-ec1a-ddae-9753bc550027, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.193034] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820653, 'name': ReconfigVM_Task, 'duration_secs': 0.202389} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.193461] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance 'cb7eb689-b8f6-479d-aa6b-c27fab16e131' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1675.211271] env[63241]: DEBUG nova.policy [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53f691b52644488c832ce1224a079218', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e48fc59560ab47ae87be73ab11b13e7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1675.387653] env[63241]: DEBUG nova.compute.manager [req-f906eed6-33c1-4f16-a1af-e6e2e5cf692f req-1908abde-57d5-4bc4-bbf9-e418f23bbacb service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received event network-vif-plugged-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1675.389238] env[63241]: DEBUG oslo_concurrency.lockutils [req-f906eed6-33c1-4f16-a1af-e6e2e5cf692f req-1908abde-57d5-4bc4-bbf9-e418f23bbacb service nova] Acquiring lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.389238] env[63241]: DEBUG oslo_concurrency.lockutils [req-f906eed6-33c1-4f16-a1af-e6e2e5cf692f req-1908abde-57d5-4bc4-bbf9-e418f23bbacb service nova] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.389238] env[63241]: DEBUG oslo_concurrency.lockutils [req-f906eed6-33c1-4f16-a1af-e6e2e5cf692f req-1908abde-57d5-4bc4-bbf9-e418f23bbacb service nova] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.389238] env[63241]: DEBUG nova.compute.manager [req-f906eed6-33c1-4f16-a1af-e6e2e5cf692f req-1908abde-57d5-4bc4-bbf9-e418f23bbacb service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] No waiting events found dispatching network-vif-plugged-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1675.389238] env[63241]: WARNING nova.compute.manager [req-f906eed6-33c1-4f16-a1af-e6e2e5cf692f req-1908abde-57d5-4bc4-bbf9-e418f23bbacb service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received unexpected event network-vif-plugged-24131a23-55e1-4bd6-8813-5768da05438f for instance with vm_state building and task_state spawning. [ 1675.466370] env[63241]: DEBUG oslo_vmware.api [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820651, 'name': PowerOnVM_Task, 'duration_secs': 1.819956} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.466816] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1675.467340] env[63241]: INFO nova.compute.manager [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Took 17.38 seconds to spawn the instance on the hypervisor. [ 1675.468044] env[63241]: DEBUG nova.compute.manager [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1675.472018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01aa7e1-0128-46b8-b5e4-1d41c0506537 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.569987] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966ff784-e8f9-4e51-8168-b85c846a1445 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.578619] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a346fbdb-481b-46fa-b8dc-a6552e7da995 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.616079] env[63241]: DEBUG nova.compute.manager [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1675.621319] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7572f80-4140-411e-a6dc-43df05338a9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.623793] env[63241]: DEBUG nova.network.neutron [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Successfully updated port: 24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1675.631616] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fa368b-94e3-4c59-a9f5-6afe7cb73f50 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.648934] env[63241]: DEBUG nova.compute.provider_tree [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.660732] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522c4952-2551-ec1a-ddae-9753bc550027, 'name': SearchDatastore_Task, 'duration_secs': 0.014246} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.660984] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.663164] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096/4a57d04b-72a0-4db3-8119-994b67e4b096.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1675.663164] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3a0666a-6bd9-42cf-b16a-60fd27d8c00c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.670076] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1675.670076] env[63241]: value = "task-1820654" [ 1675.670076] env[63241]: _type = "Task" [ 1675.670076] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.678082] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820654, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 
tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1675.703142] env[63241]: DEBUG nova.virt.hardware [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1675.708688] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Reconfiguring VM instance instance-00000045 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1675.709391] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e169fe46-ad46-4410-a866-644c95396416 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.728056] env[63241]: DEBUG nova.network.neutron [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Successfully created port: 315b118d-b5f6-4f70-9ea2-76028cc6344d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1675.736296] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1675.736296] env[63241]: value = "task-1820655" [ 1675.736296] env[63241]: _type = "Task" [ 1675.736296] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.744654] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820655, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.997950] env[63241]: INFO nova.compute.manager [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Took 34.44 seconds to build instance. 
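[annotation] The entries above repeat one pattern throughout this section: an asynchronous vCenter task (ReconfigVM_Task, CopyVirtualDisk_Task, PowerOnVM_Task, ...) is submitted, a task handle is returned immediately, and the API layer polls it, logging "progress is N%" until "completed successfully" together with a duration. The following is a minimal, hypothetical Python sketch of that poll-until-complete loop, written only to illustrate the pattern visible in the log; FakeTask, its fields, and wait_for_task here are invented for illustration and are not the oslo.vmware implementation.

    # Illustrative sketch only -- NOT oslo.vmware's actual code.
    # Mimics the poll-until-complete pattern seen in the log entries above.
    import time
    from dataclasses import dataclass

    @dataclass
    class FakeTask:
        """Stand-in for a vCenter task handle (hypothetical)."""
        task_id: str
        name: str
        progress: int = 0
        duration_secs: float = 0.0

        def refresh(self) -> None:
            # A real client would re-read task state from the server here.
            self.progress = min(100, self.progress + 33)

    def wait_for_task(task: FakeTask, poll_interval: float = 0.5) -> FakeTask:
        """Poll a task until completion, logging progress like the entries above."""
        started = time.monotonic()
        print(f"Waiting for the task: {task.task_id} ({task.name}) to complete.")
        while task.progress < 100:
            print(f"Task: {task.task_id}, name: {task.name} progress is {task.progress}%.")
            time.sleep(poll_interval)
            task.refresh()
        task.duration_secs = time.monotonic() - started
        print(f"Task: {task.task_id}, name: {task.name}, "
              f"duration_secs: {task.duration_secs:.6f} completed successfully.")
        return task

    if __name__ == "__main__":
        wait_for_task(FakeTask(task_id="task-0000000", name="ReconfigVM_Task"))

[annotation] The same shape explains the interleaving in the log: while one request waits on its task, other requests (different req- IDs) submit and poll their own tasks, so progress lines for several task IDs appear between a given task's submission and its completion entry.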
[ 1676.129646] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.129786] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.131589] env[63241]: DEBUG nova.network.neutron [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1676.156174] env[63241]: DEBUG nova.scheduler.client.report [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1676.182705] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820654, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.248391] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820655, 'name': ReconfigVM_Task, 'duration_secs': 0.183755} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.248945] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Reconfigured VM instance instance-00000045 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1676.249644] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0b6ded-b6d5-4e9b-8bc9-781db26d61d9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.274521] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] cb7eb689-b8f6-479d-aa6b-c27fab16e131/cb7eb689-b8f6-479d-aa6b-c27fab16e131.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1676.274871] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ac5d3d6-6dee-496e-8dc4-d97248193d71 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.296746] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1676.296746] env[63241]: value = "task-1820656" [ 1676.296746] env[63241]: _type = "Task" [ 1676.296746] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.307215] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820656, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.501387] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61cbd954-3ca3-4a3c-9653-f307b5475d54 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.950s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.634555] env[63241]: DEBUG nova.compute.manager [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1676.667236] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1676.667236] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1676.667236] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1676.667440] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1676.667644] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1676.667705] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1676.667905] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1676.668121] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1676.668281] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 
tempest-ImagesTestJSON-2007080518-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1676.668445] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1676.668613] env[63241]: DEBUG nova.virt.hardware [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1676.672018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.071s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.672018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d91cf31-111b-4d50-9651-ff999d2a357e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.676077] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.616s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.676077] env[63241]: DEBUG nova.objects.instance [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lazy-loading 'resources' on Instance uuid effc3987-45d0-4305-83a2-0eba47d2c7fd {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1676.698361] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.735926} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.701760] env[63241]: DEBUG nova.network.neutron [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1676.704656] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32631c0-85f3-4c5d-bc52-09df1ef01d1b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.710778] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096/4a57d04b-72a0-4db3-8119-994b67e4b096.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1676.710778] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1676.710953] env[63241]: INFO nova.scheduler.client.report [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted allocations for instance 7158c64a-5036-419b-b110-7e22c12bf3dd [ 1676.711749] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da53811c-dfd6-4ffb-83dd-caf0a17f8b55 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.729270] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1676.729270] env[63241]: value = "task-1820657" [ 1676.729270] env[63241]: _type = "Task" [ 1676.729270] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.739154] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820657, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.806137] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820656, 'name': ReconfigVM_Task, 'duration_secs': 0.307708} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.806835] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Reconfigured VM instance instance-00000045 to attach disk [datastore1] cb7eb689-b8f6-479d-aa6b-c27fab16e131/cb7eb689-b8f6-479d-aa6b-c27fab16e131.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1676.807165] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance 'cb7eb689-b8f6-479d-aa6b-c27fab16e131' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1676.887312] env[63241]: DEBUG nova.network.neutron [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating instance_info_cache with network_info: [{"id": "24131a23-55e1-4bd6-8813-5768da05438f", "address": "fa:16:3e:fa:8e:d4", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24131a23-55", "ovs_interfaceid": "24131a23-55e1-4bd6-8813-5768da05438f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.221642] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4ca1732f-d576-4be6-8855-73336e3e73f2 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "7158c64a-5036-419b-b110-7e22c12bf3dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.346s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.240105] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820657, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064271} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.240641] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1677.241957] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00904484-e24a-4c1c-8276-25f8a902e728 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.266973] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096/4a57d04b-72a0-4db3-8119-994b67e4b096.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1677.270657] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-791c376e-2639-42e2-abb0-1716d35734db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.296805] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "381bba62-49a7-4d6f-b12a-741f5d884fe5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.296805] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "381bba62-49a7-4d6f-b12a-741f5d884fe5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.300731] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1677.300731] env[63241]: value = "task-1820658" [ 1677.300731] env[63241]: _type = "Task" [ 1677.300731] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.317032] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820658, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.317886] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b61763-882f-4600-83f7-0dcf95deead3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.348347] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b6191b-1e91-42ec-a53d-a80de488a784 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.368212] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance 'cb7eb689-b8f6-479d-aa6b-c27fab16e131' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1677.393195] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.393522] env[63241]: DEBUG nova.compute.manager [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Instance network_info: |[{"id": "24131a23-55e1-4bd6-8813-5768da05438f", "address": "fa:16:3e:fa:8e:d4", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24131a23-55", "ovs_interfaceid": "24131a23-55e1-4bd6-8813-5768da05438f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1677.394199] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:8e:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfae3ef8-cae7-455d-8632-ba93e1671625', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24131a23-55e1-4bd6-8813-5768da05438f', 'vif_model': 
'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1677.402033] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Creating folder: Project (5d1a62ae45c74a7ba071363005b3a52e). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1677.402333] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8ebcae5-f621-4995-975b-c69866f9c8a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.413872] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Created folder: Project (5d1a62ae45c74a7ba071363005b3a52e) in parent group-v376927. [ 1677.413872] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Creating folder: Instances. Parent ref: group-v377133. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1677.414093] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89864178-4fbd-453f-a90c-2600739adc88 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.424010] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Created folder: Instances in parent group-v377133. [ 1677.424272] env[63241]: DEBUG oslo.service.loopingcall [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1677.424464] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1677.424906] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2a62401-0e3d-47d7-a77c-ad60a0e03042 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.446200] env[63241]: DEBUG nova.compute.manager [req-207b4b51-42c4-472c-81f1-ee5e257ec2db req-c28334e7-242a-4634-9303-87d9b7785900 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received event network-changed-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1677.446426] env[63241]: DEBUG nova.compute.manager [req-207b4b51-42c4-472c-81f1-ee5e257ec2db req-c28334e7-242a-4634-9303-87d9b7785900 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Refreshing instance network info cache due to event network-changed-24131a23-55e1-4bd6-8813-5768da05438f. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1677.446901] env[63241]: DEBUG oslo_concurrency.lockutils [req-207b4b51-42c4-472c-81f1-ee5e257ec2db req-c28334e7-242a-4634-9303-87d9b7785900 service nova] Acquiring lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.446901] env[63241]: DEBUG oslo_concurrency.lockutils [req-207b4b51-42c4-472c-81f1-ee5e257ec2db req-c28334e7-242a-4634-9303-87d9b7785900 service nova] Acquired lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.447115] env[63241]: DEBUG nova.network.neutron [req-207b4b51-42c4-472c-81f1-ee5e257ec2db req-c28334e7-242a-4634-9303-87d9b7785900 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Refreshing network info cache for port 24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1677.453504] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1677.453504] env[63241]: value = "task-1820661" [ 1677.453504] env[63241]: _type = "Task" [ 1677.453504] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.462935] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820661, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.647473] env[63241]: DEBUG nova.network.neutron [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Successfully updated port: 315b118d-b5f6-4f70-9ea2-76028cc6344d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1677.685372] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bdf897-5400-493c-8dcf-e0f47ca808ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.694125] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0671ca-2b01-4ac3-9544-e8356f8030b8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.729266] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b01b4e-9ff7-4fb1-86e7-db4d9715e378 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.737611] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f208c4c-9b73-490f-89b8-561180c8445f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.752460] env[63241]: DEBUG nova.compute.provider_tree [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.799231] env[63241]: DEBUG nova.compute.manager [None 
req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1677.811336] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820658, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.937020] env[63241]: DEBUG nova.network.neutron [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Port 5546d295-8d78-4143-b874-e6cc21c5945a binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1677.967743] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820661, 'name': CreateVM_Task, 'duration_secs': 0.376329} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.967946] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1677.968631] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.968793] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.969212] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1677.969628] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38aaccf2-76e8-41a9-973b-8afd69aa9cf7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.976674] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1677.976674] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5299a428-cdfa-6fbb-d400-54e7d7426d67" [ 1677.976674] env[63241]: _type = "Task" [ 1677.976674] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.983011] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5299a428-cdfa-6fbb-d400-54e7d7426d67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.152685] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "refresh_cache-c8f1ce16-70b7-41fd-8516-63198139c1cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.152750] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "refresh_cache-c8f1ce16-70b7-41fd-8516-63198139c1cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.152860] env[63241]: DEBUG nova.network.neutron [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1678.209791] env[63241]: DEBUG nova.network.neutron [req-207b4b51-42c4-472c-81f1-ee5e257ec2db req-c28334e7-242a-4634-9303-87d9b7785900 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updated VIF entry in instance network info cache for port 24131a23-55e1-4bd6-8813-5768da05438f. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1678.210184] env[63241]: DEBUG nova.network.neutron [req-207b4b51-42c4-472c-81f1-ee5e257ec2db req-c28334e7-242a-4634-9303-87d9b7785900 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating instance_info_cache with network_info: [{"id": "24131a23-55e1-4bd6-8813-5768da05438f", "address": "fa:16:3e:fa:8e:d4", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24131a23-55", "ovs_interfaceid": "24131a23-55e1-4bd6-8813-5768da05438f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.255689] env[63241]: DEBUG nova.scheduler.client.report [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1678.320458] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820658, 'name': ReconfigVM_Task, 'duration_secs': 0.620183} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.320884] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096/4a57d04b-72a0-4db3-8119-994b67e4b096.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1678.321826] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a8f0114-cd7b-4301-8191-2bfe6f7ec4e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.329626] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1678.329626] env[63241]: value = "task-1820662" [ 1678.329626] env[63241]: _type = "Task" [ 1678.329626] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.343074] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820662, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.425757] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.485318] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5299a428-cdfa-6fbb-d400-54e7d7426d67, 'name': SearchDatastore_Task, 'duration_secs': 0.009621} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.485645] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.485887] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1678.486151] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.486327] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.486514] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1678.486778] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69ab850d-cd81-4111-acd7-7a2d134e9729 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.497180] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1678.497440] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1678.498376] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dae84ae5-3e0e-40fb-ab45-ab8169d45c4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.504162] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1678.504162] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52785a4e-3286-389b-391e-316af68cc735" [ 1678.504162] env[63241]: _type = "Task" [ 1678.504162] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.511904] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52785a4e-3286-389b-391e-316af68cc735, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.703160] env[63241]: DEBUG nova.network.neutron [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1678.713366] env[63241]: DEBUG oslo_concurrency.lockutils [req-207b4b51-42c4-472c-81f1-ee5e257ec2db req-c28334e7-242a-4634-9303-87d9b7785900 service nova] Releasing lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.760227] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.084s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.762699] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.285s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.764366] env[63241]: INFO nova.compute.claims [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1678.793327] env[63241]: INFO nova.scheduler.client.report [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Deleted allocations for instance effc3987-45d0-4305-83a2-0eba47d2c7fd [ 1678.844488] env[63241]: DEBUG oslo_vmware.api [None 
req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820662, 'name': Rename_Task, 'duration_secs': 0.341948} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.844760] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1678.844999] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33b6b555-dbe4-411e-912f-d1ced93800c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.851611] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1678.851611] env[63241]: value = "task-1820663" [ 1678.851611] env[63241]: _type = "Task" [ 1678.851611] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.860726] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820663, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.962049] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.962450] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.962450] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.966303] env[63241]: DEBUG nova.network.neutron [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Updating instance_info_cache with network_info: [{"id": "315b118d-b5f6-4f70-9ea2-76028cc6344d", "address": "fa:16:3e:8f:3f:fa", "network": {"id": 
"cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap315b118d-b5", "ovs_interfaceid": "315b118d-b5f6-4f70-9ea2-76028cc6344d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1679.014999] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52785a4e-3286-389b-391e-316af68cc735, 'name': SearchDatastore_Task, 'duration_secs': 0.024762} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.015852] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41bfd777-c507-439b-87b0-a903c2ac897f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.021574] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1679.021574] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ebabb2-7cf6-0635-faf8-ebddc8fb1d97" [ 1679.021574] env[63241]: _type = "Task" [ 1679.021574] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.034554] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ebabb2-7cf6-0635-faf8-ebddc8fb1d97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.302879] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dd89c372-80e8-4ffd-a5e9-f4f5f5ba6366 tempest-ServerTagsTestJSON-1930844894 tempest-ServerTagsTestJSON-1930844894-project-member] Lock "effc3987-45d0-4305-83a2-0eba47d2c7fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.362752] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820663, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.468157] env[63241]: DEBUG nova.compute.manager [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Received event network-vif-plugged-315b118d-b5f6-4f70-9ea2-76028cc6344d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1679.468377] env[63241]: DEBUG oslo_concurrency.lockutils [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] Acquiring lock "c8f1ce16-70b7-41fd-8516-63198139c1cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.468582] env[63241]: DEBUG oslo_concurrency.lockutils [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] Lock "c8f1ce16-70b7-41fd-8516-63198139c1cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.468742] env[63241]: DEBUG oslo_concurrency.lockutils [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] Lock "c8f1ce16-70b7-41fd-8516-63198139c1cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.469018] env[63241]: DEBUG nova.compute.manager [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] No waiting events found dispatching network-vif-plugged-315b118d-b5f6-4f70-9ea2-76028cc6344d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1679.469119] env[63241]: WARNING nova.compute.manager [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Received unexpected event network-vif-plugged-315b118d-b5f6-4f70-9ea2-76028cc6344d for instance with vm_state building and task_state spawning. 
[ 1679.469328] env[63241]: DEBUG nova.compute.manager [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Received event network-changed-315b118d-b5f6-4f70-9ea2-76028cc6344d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1679.469428] env[63241]: DEBUG nova.compute.manager [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Refreshing instance network info cache due to event network-changed-315b118d-b5f6-4f70-9ea2-76028cc6344d. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1679.469565] env[63241]: DEBUG oslo_concurrency.lockutils [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] Acquiring lock "refresh_cache-c8f1ce16-70b7-41fd-8516-63198139c1cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.470842] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "refresh_cache-c8f1ce16-70b7-41fd-8516-63198139c1cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.470842] env[63241]: DEBUG nova.compute.manager [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Instance network_info: |[{"id": "315b118d-b5f6-4f70-9ea2-76028cc6344d", "address": "fa:16:3e:8f:3f:fa", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap315b118d-b5", "ovs_interfaceid": "315b118d-b5f6-4f70-9ea2-76028cc6344d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1679.471106] env[63241]: DEBUG oslo_concurrency.lockutils [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] Acquired lock "refresh_cache-c8f1ce16-70b7-41fd-8516-63198139c1cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.471106] env[63241]: DEBUG nova.network.neutron [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Refreshing network info cache for port 
315b118d-b5f6-4f70-9ea2-76028cc6344d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1679.472481] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:3f:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '315b118d-b5f6-4f70-9ea2-76028cc6344d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1679.483995] env[63241]: DEBUG oslo.service.loopingcall [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1679.488696] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1679.489217] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d913ae2c-89a2-4754-90af-c2dfdbec6d78 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.516775] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1679.516775] env[63241]: value = "task-1820664" [ 1679.516775] env[63241]: _type = "Task" [ 1679.516775] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.531546] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820664, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.537725] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ebabb2-7cf6-0635-faf8-ebddc8fb1d97, 'name': SearchDatastore_Task, 'duration_secs': 0.01415} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.538129] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.538265] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951/0e4a3b3a-4464-404f-9154-1ab6f97ae951.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1679.538910] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0aaf7131-f428-4af6-82e9-6dadba4a92f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.547467] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1679.547467] env[63241]: value = "task-1820665" [ 1679.547467] env[63241]: _type = "Task" [ 1679.547467] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.555473] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820665, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.865468] env[63241]: DEBUG oslo_vmware.api [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820663, 'name': PowerOnVM_Task, 'duration_secs': 0.896333} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.869234] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1679.870686] env[63241]: DEBUG nova.compute.manager [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1679.870686] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2e5a23-9fd5-4db6-ae24-de67e7440f00 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.035200] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820664, 'name': CreateVM_Task, 'duration_secs': 0.461957} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.039794] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1680.039794] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.039794] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.039794] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1680.039794] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-215b3099-63f2-4263-9051-b5222ed4dbaa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.043879] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1680.043879] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5271e958-10f3-6fe3-a3ab-41a2a2df2746" [ 1680.043879] env[63241]: _type = "Task" [ 1680.043879] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.052035] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.052146] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.052348] env[63241]: DEBUG nova.network.neutron [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1680.062552] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5271e958-10f3-6fe3-a3ab-41a2a2df2746, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.065717] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496725} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.065717] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951/0e4a3b3a-4464-404f-9154-1ab6f97ae951.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1680.066512] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1680.066762] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26cd2c44-1c44-47f6-9154-7c2a51865eab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.078134] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1680.078134] env[63241]: value = "task-1820666" [ 1680.078134] env[63241]: _type = "Task" [ 1680.078134] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.086655] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820666, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.228888] env[63241]: DEBUG nova.network.neutron [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Updated VIF entry in instance network info cache for port 315b118d-b5f6-4f70-9ea2-76028cc6344d. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1680.229268] env[63241]: DEBUG nova.network.neutron [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Updating instance_info_cache with network_info: [{"id": "315b118d-b5f6-4f70-9ea2-76028cc6344d", "address": "fa:16:3e:8f:3f:fa", "network": {"id": "cd2ecc8a-0a76-48a5-9162-4eac798ab55d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1197708022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e48fc59560ab47ae87be73ab11b13e7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap315b118d-b5", "ovs_interfaceid": "315b118d-b5f6-4f70-9ea2-76028cc6344d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.276038] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621352e7-f9dc-419d-a31e-4dae7d1c786a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.283579] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76dcd92-6d23-4ec4-a78f-6dcfc4347079 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.315370] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f575887-9126-40e6-8339-a68ebd8c6477 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.324607] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a689ce-62a0-48a8-a3b7-e8653bcfba44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.340043] env[63241]: DEBUG nova.compute.provider_tree [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1680.395576] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.556681] env[63241]: DEBUG oslo_vmware.api [None 
req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5271e958-10f3-6fe3-a3ab-41a2a2df2746, 'name': SearchDatastore_Task, 'duration_secs': 0.019225} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.557501] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.557501] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1680.557501] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.557679] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.557790] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1680.560938] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d052303c-3070-430e-ab36-916d163e02e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.571201] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1680.571398] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1680.572442] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dbaf1cb-7797-412e-bbd8-45b26bcb18fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.578427] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1680.578427] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52700b1a-802d-7db0-2e6e-d834f1ea6901" [ 1680.578427] env[63241]: _type = "Task" [ 1680.578427] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.590645] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52700b1a-802d-7db0-2e6e-d834f1ea6901, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.594402] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067587} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.594657] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1680.595491] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0a622e-4ffb-48bc-b79c-23c6f6e861b8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.623785] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951/0e4a3b3a-4464-404f-9154-1ab6f97ae951.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1680.626446] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84ee7c99-9fd2-4dfa-b8e0-564a228db83c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.647345] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1680.647345] env[63241]: value = "task-1820667" [ 1680.647345] env[63241]: _type = "Task" [ 1680.647345] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.656420] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820667, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.732661] env[63241]: DEBUG oslo_concurrency.lockutils [req-d9406d10-d91d-4659-abf3-9223d774b40d req-e912e7d3-d56f-4d2a-af42-d050d1a37c6d service nova] Releasing lock "refresh_cache-c8f1ce16-70b7-41fd-8516-63198139c1cc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.846019] env[63241]: DEBUG nova.scheduler.client.report [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1680.874549] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "4a57d04b-72a0-4db3-8119-994b67e4b096" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.877692] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "4a57d04b-72a0-4db3-8119-994b67e4b096" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.877692] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "4a57d04b-72a0-4db3-8119-994b67e4b096-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.877692] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "4a57d04b-72a0-4db3-8119-994b67e4b096-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.877692] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 
tempest-ServerShowV247Test-688784001-project-member] Lock "4a57d04b-72a0-4db3-8119-994b67e4b096-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.878991] env[63241]: INFO nova.compute.manager [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Terminating instance [ 1680.881865] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "refresh_cache-4a57d04b-72a0-4db3-8119-994b67e4b096" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.882033] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired lock "refresh_cache-4a57d04b-72a0-4db3-8119-994b67e4b096" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.882238] env[63241]: DEBUG nova.network.neutron [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1680.895528] env[63241]: DEBUG nova.network.neutron [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance_info_cache with network_info: [{"id": "5546d295-8d78-4143-b874-e6cc21c5945a", "address": "fa:16:3e:8b:83:aa", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5546d295-8d", "ovs_interfaceid": "5546d295-8d78-4143-b874-e6cc21c5945a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1681.094729] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52700b1a-802d-7db0-2e6e-d834f1ea6901, 'name': 
SearchDatastore_Task, 'duration_secs': 0.011765} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.097100] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-075ff832-71af-42d1-a76f-cb659f132bc1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.103284] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1681.103284] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52603412-4884-22ab-7ac8-6b172c29d926" [ 1681.103284] env[63241]: _type = "Task" [ 1681.103284] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.113847] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52603412-4884-22ab-7ac8-6b172c29d926, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.158012] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820667, 'name': ReconfigVM_Task, 'duration_secs': 0.343086} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.158629] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951/0e4a3b3a-4464-404f-9154-1ab6f97ae951.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1681.159326] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb8d5a44-d6e1-4944-8743-faa32c5652a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.168406] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1681.168406] env[63241]: value = "task-1820668" [ 1681.168406] env[63241]: _type = "Task" [ 1681.168406] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.177718] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820668, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.348944] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.349596] env[63241]: DEBUG nova.compute.manager [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1681.352350] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.339s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.353675] env[63241]: INFO nova.compute.claims [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1681.402613] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.413573] env[63241]: DEBUG nova.network.neutron [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1681.484935] env[63241]: DEBUG nova.network.neutron [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1681.516772] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "f65e5b00-38b5-4453-b370-1f56f18053eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.516772] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "f65e5b00-38b5-4453-b370-1f56f18053eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.614092] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52603412-4884-22ab-7ac8-6b172c29d926, 'name': SearchDatastore_Task, 'duration_secs': 0.025067} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.614547] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.615400] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c8f1ce16-70b7-41fd-8516-63198139c1cc/c8f1ce16-70b7-41fd-8516-63198139c1cc.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1681.615871] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e5c869a-8186-434b-ad8e-40709b5af761 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.630674] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1681.630674] env[63241]: value = "task-1820669" [ 1681.630674] env[63241]: _type = "Task" [ 1681.630674] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.638099] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.684623] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820668, 'name': Rename_Task, 'duration_secs': 0.149055} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.684623] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1681.684623] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-feeb8d36-f4a8-4d4e-9097-d2a680e6adcc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.688453] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1681.688453] env[63241]: value = "task-1820670" [ 1681.688453] env[63241]: _type = "Task" [ 1681.688453] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.697394] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820670, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.859360] env[63241]: DEBUG nova.compute.utils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1681.862741] env[63241]: DEBUG nova.compute.manager [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1681.862907] env[63241]: DEBUG nova.network.neutron [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1681.906752] env[63241]: DEBUG nova.policy [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6b1519467304fc5bb33d508c55348be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '854490ce445a413d85901cfe6b091346', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1681.940448] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399d6dcb-fe5e-43b1-b06d-c5bdcd34ebb9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.968627] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88adf36-de93-48b6-906d-a8f84373660f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.978899] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance 'cb7eb689-b8f6-479d-aa6b-c27fab16e131' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1681.987983] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Releasing lock "refresh_cache-4a57d04b-72a0-4db3-8119-994b67e4b096" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.988567] env[63241]: DEBUG nova.compute.manager [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1681.988896] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1681.990033] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecb288b-bc07-4561-adb1-d2466cc3fb4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.998545] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1681.998899] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26317919-6941-4b5c-9b78-815c6888f784 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.006630] env[63241]: DEBUG oslo_vmware.api [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1682.006630] env[63241]: value = "task-1820671" [ 1682.006630] env[63241]: _type = "Task" [ 1682.006630] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.018143] env[63241]: DEBUG nova.compute.manager [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1682.020912] env[63241]: DEBUG oslo_vmware.api [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.140680] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820669, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.203106] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820670, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.354555] env[63241]: DEBUG nova.network.neutron [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Successfully created port: fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1682.363785] env[63241]: DEBUG nova.compute.manager [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1682.485535] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1682.488199] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe01470f-608b-40f2-b65f-a1f2d1ae95c1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.495484] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1682.495484] env[63241]: value = "task-1820672" [ 1682.495484] env[63241]: _type = "Task" [ 1682.495484] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.503194] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820672, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.516276] env[63241]: DEBUG oslo_vmware.api [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820671, 'name': PowerOffVM_Task, 'duration_secs': 0.290989} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.518877] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1682.519072] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1682.520276] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce97365f-68c3-4168-994e-e8db1dabb0f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.539315] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.545715] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1682.545925] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1682.546119] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Deleting the datastore file [datastore1] 4a57d04b-72a0-4db3-8119-994b67e4b096 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1682.546383] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3bb28e07-6491-4550-bfaa-03a4fc59b419 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.555726] env[63241]: DEBUG oslo_vmware.api [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1682.555726] env[63241]: value = "task-1820674" [ 1682.555726] env[63241]: _type = "Task" [ 1682.555726] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.564261] env[63241]: DEBUG oslo_vmware.api [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820674, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.640558] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631204} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.640880] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c8f1ce16-70b7-41fd-8516-63198139c1cc/c8f1ce16-70b7-41fd-8516-63198139c1cc.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1682.641059] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1682.641356] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b6cc8b6-7cb3-4492-a81e-84f5ac636cb4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.647783] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1682.647783] env[63241]: value = "task-1820675" [ 1682.647783] env[63241]: _type = "Task" [ 1682.647783] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.656248] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820675, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.698578] env[63241]: DEBUG oslo_vmware.api [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820670, 'name': PowerOnVM_Task, 'duration_secs': 0.528945} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.699814] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1682.699814] env[63241]: INFO nova.compute.manager [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Took 8.56 seconds to spawn the instance on the hypervisor. [ 1682.699814] env[63241]: DEBUG nova.compute.manager [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1682.700319] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677aadac-be3b-458f-8787-3ad8bd381394 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.894615] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72878d46-d06c-4127-a1f2-0bee97a6ab25 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.902129] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7087de9d-6a29-4181-8d10-51034fd13efa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.933650] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940a1158-d2fc-4b85-8f2e-18915e865ceb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.946209] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e518244-671f-4eaa-834e-cb8ac0d6a1f4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.960614] env[63241]: DEBUG nova.compute.provider_tree [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.006220] env[63241]: DEBUG oslo_vmware.api [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820672, 'name': PowerOnVM_Task, 'duration_secs': 0.467775} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.006509] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1683.006694] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d30b1bef-dffe-41e8-90a3-f72a07b49949 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance 'cb7eb689-b8f6-479d-aa6b-c27fab16e131' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1683.067159] env[63241]: DEBUG oslo_vmware.api [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240066} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.067426] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.067612] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.067791] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1683.067964] env[63241]: INFO nova.compute.manager [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1683.068224] env[63241]: DEBUG oslo.service.loopingcall [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.068417] env[63241]: DEBUG nova.compute.manager [-] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1683.068513] env[63241]: DEBUG nova.network.neutron [-] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1683.085965] env[63241]: DEBUG nova.network.neutron [-] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1683.156977] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820675, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180087} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.157614] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1683.159413] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d831f6ac-0ad3-4e16-8640-b0a5934e658e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.193941] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] c8f1ce16-70b7-41fd-8516-63198139c1cc/c8f1ce16-70b7-41fd-8516-63198139c1cc.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1683.194634] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a980cc62-c9cb-4b99-bf45-be64fc0393e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.219182] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1683.219182] env[63241]: value = "task-1820676" [ 1683.219182] env[63241]: _type = "Task" [ 1683.219182] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.224798] env[63241]: INFO nova.compute.manager [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Took 26.65 seconds to build instance. 
[ 1683.231264] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820676, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.382937] env[63241]: DEBUG nova.compute.manager [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1683.412257] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1683.412533] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1683.412713] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1683.413191] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1683.413191] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1683.413569] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1683.413869] env[63241]: DEBUG nova.virt.hardware [None 
req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1683.414115] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1683.414392] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1683.414625] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1683.414876] env[63241]: DEBUG nova.virt.hardware [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1683.415796] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de672abe-ec3a-4047-8ebd-80bf626d5275 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.424455] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6b8af4-d6b9-4899-afe0-caba37159bc2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.463452] env[63241]: DEBUG nova.scheduler.client.report [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1683.588703] env[63241]: DEBUG nova.network.neutron [-] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.729841] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0d3f1c6c-660f-4114-a7c1-9443d3a68be7 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock 
"0e4a3b3a-4464-404f-9154-1ab6f97ae951" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.179s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.730129] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820676, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.914721] env[63241]: DEBUG nova.compute.manager [req-b3ddb42c-f7f4-4bd7-9766-40a41525840b req-7761d231-867e-4c2b-9dca-ffd124bbbc05 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Received event network-vif-plugged-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1683.914943] env[63241]: DEBUG oslo_concurrency.lockutils [req-b3ddb42c-f7f4-4bd7-9766-40a41525840b req-7761d231-867e-4c2b-9dca-ffd124bbbc05 service nova] Acquiring lock "1e172f73-972e-4401-b358-512f7e03b27f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.915179] env[63241]: DEBUG oslo_concurrency.lockutils [req-b3ddb42c-f7f4-4bd7-9766-40a41525840b req-7761d231-867e-4c2b-9dca-ffd124bbbc05 service nova] Lock "1e172f73-972e-4401-b358-512f7e03b27f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.915404] env[63241]: DEBUG oslo_concurrency.lockutils [req-b3ddb42c-f7f4-4bd7-9766-40a41525840b req-7761d231-867e-4c2b-9dca-ffd124bbbc05 service nova] Lock "1e172f73-972e-4401-b358-512f7e03b27f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.915584] env[63241]: DEBUG nova.compute.manager [req-b3ddb42c-f7f4-4bd7-9766-40a41525840b req-7761d231-867e-4c2b-9dca-ffd124bbbc05 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] No waiting events found dispatching network-vif-plugged-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1683.915748] env[63241]: WARNING nova.compute.manager [req-b3ddb42c-f7f4-4bd7-9766-40a41525840b req-7761d231-867e-4c2b-9dca-ffd124bbbc05 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Received unexpected event network-vif-plugged-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea for instance with vm_state building and task_state spawning. 
[ 1683.969103] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.617s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.969635] env[63241]: DEBUG nova.compute.manager [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1683.972448] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.534s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.974535] env[63241]: INFO nova.compute.claims [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1684.097064] env[63241]: INFO nova.compute.manager [-] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Took 1.03 seconds to deallocate network for instance. [ 1684.215241] env[63241]: DEBUG nova.network.neutron [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Successfully updated port: fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1684.233212] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820676, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.482119] env[63241]: DEBUG nova.compute.utils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1684.483715] env[63241]: DEBUG nova.compute.manager [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1684.483994] env[63241]: DEBUG nova.network.neutron [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1684.567424] env[63241]: DEBUG nova.policy [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbc8bc30657b4647ba951c4b826ad5c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2338858c7214e8286c5948da80ffc1b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1684.601158] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.721996] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.721996] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.721996] env[63241]: DEBUG nova.network.neutron [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1684.740218] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820676, 'name': ReconfigVM_Task, 'duration_secs': 1.034792} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.743165] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Reconfigured VM instance instance-0000004d to attach disk [datastore1] c8f1ce16-70b7-41fd-8516-63198139c1cc/c8f1ce16-70b7-41fd-8516-63198139c1cc.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1684.746459] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-22e0a7cd-0988-4ce8-9619-05ddd730b723 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.765988] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1684.765988] env[63241]: value = "task-1820677" [ 1684.765988] env[63241]: _type = "Task" [ 1684.765988] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.784083] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820677, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.990580] env[63241]: DEBUG nova.compute.manager [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1685.030015] env[63241]: DEBUG nova.network.neutron [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Successfully created port: 68ec05cb-7eaf-4904-b491-0f5d3bb27936 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1685.284550] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820677, 'name': Rename_Task, 'duration_secs': 0.162016} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.287651] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1685.287825] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-808a35cf-06bf-4dd0-aae8-d7c0cf7e16dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.295318] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1685.295318] env[63241]: value = "task-1820678" [ 1685.295318] env[63241]: _type = "Task" [ 1685.295318] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.307492] env[63241]: DEBUG nova.network.neutron [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1685.316932] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820678, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.370164] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "31998a62-70f5-4205-89b9-df8312916126" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.370426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "31998a62-70f5-4205-89b9-df8312916126" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.476369] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514cbf30-9520-409a-84f1-0c135778f30b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.486998] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad18b6e-8381-4471-910f-95e29d547169 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.534891] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219fc6c9-ba29-40e9-a24e-486cb0de0d89 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.545073] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c81705-92ba-4dbc-a1eb-11135b661df8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.559720] env[63241]: DEBUG nova.compute.provider_tree [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.705466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.707022] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.707022] env[63241]: DEBUG nova.compute.manager [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 
tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Going to confirm migration 2 {{(pid=63241) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1685.710370] env[63241]: DEBUG nova.network.neutron [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating instance_info_cache with network_info: [{"id": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "address": "fa:16:3e:17:1d:18", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbeb829e-4c", "ovs_interfaceid": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.805673] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820678, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.875936] env[63241]: DEBUG nova.compute.manager [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1686.001493] env[63241]: DEBUG nova.compute.manager [req-76a1e254-740d-425f-a77e-6bf9fbdc0d85 req-2113b6ec-20a2-478d-8335-0aab29bc37fb service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Received event network-changed-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1686.004997] env[63241]: DEBUG nova.compute.manager [req-76a1e254-740d-425f-a77e-6bf9fbdc0d85 req-2113b6ec-20a2-478d-8335-0aab29bc37fb service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Refreshing instance network info cache due to event network-changed-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1686.005355] env[63241]: DEBUG oslo_concurrency.lockutils [req-76a1e254-740d-425f-a77e-6bf9fbdc0d85 req-2113b6ec-20a2-478d-8335-0aab29bc37fb service nova] Acquiring lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.035518] env[63241]: DEBUG nova.compute.manager [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1686.061643] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1686.061778] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1686.061947] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1686.062134] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1686.062309] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1686.062474] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1686.062688] env[63241]: DEBUG 
nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1686.062847] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1686.063021] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1686.063187] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1686.063360] env[63241]: DEBUG nova.virt.hardware [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1686.064231] env[63241]: DEBUG nova.scheduler.client.report [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1686.071023] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5158d9-6204-477f-8622-e3ed41fbddeb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.077200] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b957262c-19dd-4b6f-9ab3-f7239b1b12ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.214286] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.214672] env[63241]: DEBUG nova.compute.manager [None 
req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Instance network_info: |[{"id": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "address": "fa:16:3e:17:1d:18", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbeb829e-4c", "ovs_interfaceid": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1686.214976] env[63241]: DEBUG oslo_concurrency.lockutils [req-76a1e254-740d-425f-a77e-6bf9fbdc0d85 req-2113b6ec-20a2-478d-8335-0aab29bc37fb service nova] Acquired lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.215182] env[63241]: DEBUG nova.network.neutron [req-76a1e254-740d-425f-a77e-6bf9fbdc0d85 req-2113b6ec-20a2-478d-8335-0aab29bc37fb service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Refreshing network info cache for port fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1686.216564] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:1d:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '095fbf26-7367-4f9e-87c5-2965b64b0b0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbeb829e-4c31-429b-bdb0-ecb7331ef4ea', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1686.225972] env[63241]: DEBUG oslo.service.loopingcall [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1686.227337] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1686.227568] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50ff838d-5140-47a9-bf75-fa83b8ab08be {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.247909] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1686.247909] env[63241]: value = "task-1820679" [ 1686.247909] env[63241]: _type = "Task" [ 1686.247909] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.256080] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820679, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.261675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.261855] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.262043] env[63241]: DEBUG nova.network.neutron [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1686.262278] env[63241]: DEBUG nova.objects.instance [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lazy-loading 'info_cache' on Instance uuid cb7eb689-b8f6-479d-aa6b-c27fab16e131 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1686.306660] env[63241]: DEBUG oslo_vmware.api [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820678, 'name': PowerOnVM_Task, 'duration_secs': 1.001645} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.307704] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1686.307849] env[63241]: INFO nova.compute.manager [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Took 9.68 seconds to spawn the instance on the hypervisor. [ 1686.308317] env[63241]: DEBUG nova.compute.manager [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1686.309937] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242603ea-dab9-4e21-a399-c9db8dce14ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.403852] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.425297] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.425560] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.572669] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.573190] env[63241]: DEBUG nova.compute.manager [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1686.575912] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.622s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.577251] env[63241]: INFO nova.compute.claims [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1686.758426] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820679, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.828734] env[63241]: INFO nova.compute.manager [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Took 29.12 seconds to build instance. [ 1686.927722] env[63241]: DEBUG nova.compute.manager [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1687.082209] env[63241]: DEBUG nova.compute.utils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1687.090561] env[63241]: DEBUG nova.compute.manager [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1687.090561] env[63241]: DEBUG nova.network.neutron [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1687.203944] env[63241]: DEBUG nova.network.neutron [req-76a1e254-740d-425f-a77e-6bf9fbdc0d85 req-2113b6ec-20a2-478d-8335-0aab29bc37fb service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updated VIF entry in instance network info cache for port fbeb829e-4c31-429b-bdb0-ecb7331ef4ea. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1687.204422] env[63241]: DEBUG nova.network.neutron [req-76a1e254-740d-425f-a77e-6bf9fbdc0d85 req-2113b6ec-20a2-478d-8335-0aab29bc37fb service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating instance_info_cache with network_info: [{"id": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "address": "fa:16:3e:17:1d:18", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbeb829e-4c", "ovs_interfaceid": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.273627] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820679, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.330740] env[63241]: DEBUG oslo_concurrency.lockutils [None req-76a61eb7-1737-4577-8660-23b18d7e9d6f tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "c8f1ce16-70b7-41fd-8516-63198139c1cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.625s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.344645] env[63241]: DEBUG nova.policy [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbc8bc30657b4647ba951c4b826ad5c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2338858c7214e8286c5948da80ffc1b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1687.456507] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.477235] env[63241]: DEBUG nova.network.neutron [None 
req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Successfully updated port: 68ec05cb-7eaf-4904-b491-0f5d3bb27936 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1687.588434] env[63241]: DEBUG nova.compute.manager [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1687.624718] env[63241]: DEBUG nova.network.neutron [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Successfully created port: 328d662e-d2e8-4f8a-94b4-dacebf42accf {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1687.704840] env[63241]: DEBUG nova.network.neutron [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance_info_cache with network_info: [{"id": "5546d295-8d78-4143-b874-e6cc21c5945a", "address": "fa:16:3e:8b:83:aa", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5546d295-8d", "ovs_interfaceid": "5546d295-8d78-4143-b874-e6cc21c5945a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.706935] env[63241]: DEBUG oslo_concurrency.lockutils [req-76a1e254-740d-425f-a77e-6bf9fbdc0d85 req-2113b6ec-20a2-478d-8335-0aab29bc37fb service nova] Releasing lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.765658] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820679, 'name': CreateVM_Task, 'duration_secs': 1.066118} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.765986] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1687.766775] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.767107] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.767515] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1687.767871] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99ddd51e-1724-4961-8662-84e01fa9b4e5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.774639] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1687.774639] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e83efb-23a6-17d6-6b1e-ebf25cce4541" [ 1687.774639] env[63241]: _type = "Task" [ 1687.774639] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.782922] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e83efb-23a6-17d6-6b1e-ebf25cce4541, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.979884] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "refresh_cache-f372d405-f7d5-4e5f-8c36-fe9651af2a0d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.979884] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "refresh_cache-f372d405-f7d5-4e5f-8c36-fe9651af2a0d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.979884] env[63241]: DEBUG nova.network.neutron [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1688.011368] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b84e755-1a2d-4c4f-aaff-f5e969698bbc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.020983] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ff8df1-0a38-447e-a538-34465fd12078 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.053927] env[63241]: DEBUG nova.compute.manager [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Received event network-vif-plugged-68ec05cb-7eaf-4904-b491-0f5d3bb27936 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1688.054168] env[63241]: DEBUG oslo_concurrency.lockutils [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] Acquiring lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.054403] env[63241]: DEBUG oslo_concurrency.lockutils [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.054581] env[63241]: DEBUG oslo_concurrency.lockutils [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.054747] env[63241]: DEBUG nova.compute.manager [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] [instance: 
f372d405-f7d5-4e5f-8c36-fe9651af2a0d] No waiting events found dispatching network-vif-plugged-68ec05cb-7eaf-4904-b491-0f5d3bb27936 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1688.055698] env[63241]: WARNING nova.compute.manager [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Received unexpected event network-vif-plugged-68ec05cb-7eaf-4904-b491-0f5d3bb27936 for instance with vm_state building and task_state spawning. [ 1688.055954] env[63241]: DEBUG nova.compute.manager [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Received event network-changed-68ec05cb-7eaf-4904-b491-0f5d3bb27936 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1688.056151] env[63241]: DEBUG nova.compute.manager [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Refreshing instance network info cache due to event network-changed-68ec05cb-7eaf-4904-b491-0f5d3bb27936. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1688.056412] env[63241]: DEBUG oslo_concurrency.lockutils [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] Acquiring lock "refresh_cache-f372d405-f7d5-4e5f-8c36-fe9651af2a0d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.057175] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc83b681-ec7a-4185-a323-b224e9bf44eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.068162] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cbb4fc-f487-49fe-8494-e2c7c038f58c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.081957] env[63241]: DEBUG nova.compute.provider_tree [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1688.209326] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-cb7eb689-b8f6-479d-aa6b-c27fab16e131" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.209727] env[63241]: DEBUG nova.objects.instance [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lazy-loading 'migration_context' on Instance uuid cb7eb689-b8f6-479d-aa6b-c27fab16e131 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1688.284802] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': 
session[52622af6-969a-a161-ff87-4f4559b12465]52e83efb-23a6-17d6-6b1e-ebf25cce4541, 'name': SearchDatastore_Task, 'duration_secs': 0.009838} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.285130] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.285889] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1688.285889] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.285889] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.286158] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1688.286665] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48a623cf-427d-4292-801a-4dcf1e124253 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.295085] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1688.295245] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1688.296038] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b53e428c-28b4-42dc-b53e-b7f4404ea1c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.301241] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1688.301241] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523b59f2-f7e6-458f-1712-eb6dee72cc27" [ 1688.301241] env[63241]: _type = "Task" [ 1688.301241] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.308853] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523b59f2-f7e6-458f-1712-eb6dee72cc27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.529034] env[63241]: DEBUG nova.network.neutron [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1688.585765] env[63241]: DEBUG nova.scheduler.client.report [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1688.599999] env[63241]: DEBUG nova.compute.manager [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1688.629127] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1688.629444] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1688.629815] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1688.630152] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1688.630410] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1688.630689] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1688.631112] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1688.631395] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
[ 1688.633028] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1688.633028] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1688.633028] env[63241]: DEBUG nova.virt.hardware [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1688.633259] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0110d8bc-cca6-47bf-b77b-ab3656102f39 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.643748] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a94909e-35fe-4411-82a8-c0e84d615a83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.712687] env[63241]: DEBUG nova.objects.base [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1688.713672] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7c2039-3430-4702-a07d-1daab1b94c12 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.717432] env[63241]: DEBUG nova.network.neutron [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Updating instance_info_cache with network_info: [{"id": "68ec05cb-7eaf-4904-b491-0f5d3bb27936", "address": "fa:16:3e:3b:43:74", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ec05cb-7e", "ovs_interfaceid": "68ec05cb-7eaf-4904-b491-0f5d3bb27936", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.737604] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a64cc11c-48b1-4848-87b0-9238ec063984 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.743902] env[63241]: DEBUG oslo_vmware.api [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1688.743902] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529e8d92-70d3-4ce8-2cbe-921bebd8f8bd" [ 1688.743902] env[63241]: _type = "Task" [ 1688.743902] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.752647] env[63241]: DEBUG oslo_vmware.api [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529e8d92-70d3-4ce8-2cbe-921bebd8f8bd, 'name': SearchDatastore_Task, 'duration_secs': 0.006754} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.752914] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.811263] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523b59f2-f7e6-458f-1712-eb6dee72cc27, 'name': SearchDatastore_Task, 'duration_secs': 0.008134} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.812050] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bb803b4-d36a-4eb5-af21-dadaf9f5035a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.817128] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1688.817128] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a20818-d0d2-9875-3999-b2360ec9b33b" [ 1688.817128] env[63241]: _type = "Task" [ 1688.817128] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.824509] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a20818-d0d2-9875-3999-b2360ec9b33b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.013337] env[63241]: DEBUG nova.compute.manager [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1689.013337] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a3fb31-ac97-46f9-a663-007a5f0f19a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.071221] env[63241]: DEBUG nova.compute.manager [req-a1f60c2d-b1c6-4f12-a55d-799dbaacfba6 req-7bfd25a7-b9cc-4ec4-99ab-b56604d73683 service nova] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Received event network-vif-plugged-328d662e-d2e8-4f8a-94b4-dacebf42accf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1689.071221] env[63241]: DEBUG oslo_concurrency.lockutils [req-a1f60c2d-b1c6-4f12-a55d-799dbaacfba6 req-7bfd25a7-b9cc-4ec4-99ab-b56604d73683 service nova] Acquiring lock "73ea6bff-60da-4691-a569-f4e9ae92f701-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.071565] env[63241]: DEBUG oslo_concurrency.lockutils [req-a1f60c2d-b1c6-4f12-a55d-799dbaacfba6 req-7bfd25a7-b9cc-4ec4-99ab-b56604d73683 service nova] Lock "73ea6bff-60da-4691-a569-f4e9ae92f701-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.073626] env[63241]: DEBUG oslo_concurrency.lockutils [req-a1f60c2d-b1c6-4f12-a55d-799dbaacfba6 req-7bfd25a7-b9cc-4ec4-99ab-b56604d73683 service nova] Lock "73ea6bff-60da-4691-a569-f4e9ae92f701-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.073626] env[63241]: DEBUG nova.compute.manager [req-a1f60c2d-b1c6-4f12-a55d-799dbaacfba6 req-7bfd25a7-b9cc-4ec4-99ab-b56604d73683 service nova] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] No waiting events found dispatching network-vif-plugged-328d662e-d2e8-4f8a-94b4-dacebf42accf {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1689.073626] env[63241]: WARNING nova.compute.manager [req-a1f60c2d-b1c6-4f12-a55d-799dbaacfba6 req-7bfd25a7-b9cc-4ec4-99ab-b56604d73683 service nova] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Received unexpected event network-vif-plugged-328d662e-d2e8-4f8a-94b4-dacebf42accf for instance with vm_state building and task_state spawning. 
[ 1689.091792] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.515s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.092298] env[63241]: DEBUG nova.compute.manager [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1689.095733] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.672s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.098183] env[63241]: INFO nova.compute.claims [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1689.174815] env[63241]: DEBUG nova.network.neutron [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Successfully updated port: 328d662e-d2e8-4f8a-94b4-dacebf42accf {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1689.219723] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "refresh_cache-f372d405-f7d5-4e5f-8c36-fe9651af2a0d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.220120] env[63241]: DEBUG nova.compute.manager [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Instance network_info: |[{"id": "68ec05cb-7eaf-4904-b491-0f5d3bb27936", "address": "fa:16:3e:3b:43:74", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap68ec05cb-7e", "ovs_interfaceid": "68ec05cb-7eaf-4904-b491-0f5d3bb27936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1689.220862] env[63241]: DEBUG oslo_concurrency.lockutils [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] Acquired lock "refresh_cache-f372d405-f7d5-4e5f-8c36-fe9651af2a0d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.220862] env[63241]: DEBUG nova.network.neutron [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Refreshing network info cache for port 68ec05cb-7eaf-4904-b491-0f5d3bb27936 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1689.222029] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:43:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68ec05cb-7eaf-4904-b491-0f5d3bb27936', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1689.230189] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Creating folder: Project (c2338858c7214e8286c5948da80ffc1b). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1689.231052] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9bcdddf3-18b4-452c-a4fe-d7fe2d44bfba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.243492] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Created folder: Project (c2338858c7214e8286c5948da80ffc1b) in parent group-v376927. [ 1689.243723] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Creating folder: Instances. Parent ref: group-v377138. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1689.243919] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bdb721e-e16c-442e-ba7d-142f131ee89f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.253871] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Created folder: Instances in parent group-v377138. 
[ 1689.254128] env[63241]: DEBUG oslo.service.loopingcall [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1689.254326] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1689.254561] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02833a2b-e24e-4805-967a-971205d212bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.274672] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1689.274672] env[63241]: value = "task-1820682" [ 1689.274672] env[63241]: _type = "Task" [ 1689.274672] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.283339] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820682, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.328348] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a20818-d0d2-9875-3999-b2360ec9b33b, 'name': SearchDatastore_Task, 'duration_secs': 0.009573} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.328636] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.328945] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 1e172f73-972e-4401-b358-512f7e03b27f/1e172f73-972e-4401-b358-512f7e03b27f.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1689.329226] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-773dc3d2-f7cb-43b5-92c9-672ca2856621 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.336704] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1689.336704] env[63241]: value = "task-1820683" [ 1689.336704] env[63241]: _type = "Task" [ 1689.336704] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.346699] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.528518] env[63241]: INFO nova.compute.manager [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] instance snapshotting [ 1689.531693] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9263fd-fe24-44bb-8a8c-fb4bfc1a9988 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.555071] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffca5d5-a28c-447a-b9c4-b6d03b04faa5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.602790] env[63241]: DEBUG nova.compute.utils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1689.604744] env[63241]: DEBUG nova.compute.manager [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1689.604965] env[63241]: DEBUG nova.network.neutron [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1689.669785] env[63241]: DEBUG nova.policy [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbc8bc30657b4647ba951c4b826ad5c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2338858c7214e8286c5948da80ffc1b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1689.677405] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "refresh_cache-73ea6bff-60da-4691-a569-f4e9ae92f701" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.677559] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "refresh_cache-73ea6bff-60da-4691-a569-f4e9ae92f701" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.677714] env[63241]: DEBUG nova.network.neutron [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1689.785471] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820682, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.847504] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493384} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.847661] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 1e172f73-972e-4401-b358-512f7e03b27f/1e172f73-972e-4401-b358-512f7e03b27f.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1689.847807] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1689.848069] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8f3dc93-3895-403c-8d99-4a38b314726b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.854168] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1689.854168] env[63241]: value = "task-1820684" [ 1689.854168] env[63241]: _type = "Task" [ 1689.854168] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.864942] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820684, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.946583] env[63241]: DEBUG nova.network.neutron [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Successfully created port: eaf7faa9-8f94-4c74-9c0d-96c349efc7d7 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1689.957688] env[63241]: DEBUG nova.network.neutron [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Updated VIF entry in instance network info cache for port 68ec05cb-7eaf-4904-b491-0f5d3bb27936. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1689.958052] env[63241]: DEBUG nova.network.neutron [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Updating instance_info_cache with network_info: [{"id": "68ec05cb-7eaf-4904-b491-0f5d3bb27936", "address": "fa:16:3e:3b:43:74", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ec05cb-7e", "ovs_interfaceid": "68ec05cb-7eaf-4904-b491-0f5d3bb27936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.071021] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1690.071021] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fd5138a3-0ff1-4fe9-a764-e3b94bbc6a04 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.076901] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1690.076901] env[63241]: value = "task-1820685" [ 1690.076901] env[63241]: _type = "Task" [ 1690.076901] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.085709] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820685, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.108902] env[63241]: DEBUG nova.compute.manager [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1690.229637] env[63241]: DEBUG nova.network.neutron [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1690.294595] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820682, 'name': CreateVM_Task, 'duration_secs': 0.828809} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.294799] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1690.296075] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.296075] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.296455] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1690.296796] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e57984ce-95b3-41c1-b82f-17fc1c0d8332 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.302152] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1690.302152] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5283ce44-1901-22d8-08ac-058b8d73e22a" [ 1690.302152] env[63241]: _type = "Task" [ 1690.302152] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.313731] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5283ce44-1901-22d8-08ac-058b8d73e22a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.365439] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820684, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071787} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.365747] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1690.366953] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2c6579-4ff2-4ccd-9cda-1f5e920fc93d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.398292] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 1e172f73-972e-4401-b358-512f7e03b27f/1e172f73-972e-4401-b358-512f7e03b27f.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1690.401097] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d883538f-182b-4ec7-a284-08c4a78b9be8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.420780] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1690.420780] env[63241]: value = "task-1820686" [ 1690.420780] env[63241]: _type = "Task" [ 1690.420780] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.433091] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820686, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.456650] env[63241]: DEBUG nova.network.neutron [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Updating instance_info_cache with network_info: [{"id": "328d662e-d2e8-4f8a-94b4-dacebf42accf", "address": "fa:16:3e:3c:84:a8", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328d662e-d2", "ovs_interfaceid": "328d662e-d2e8-4f8a-94b4-dacebf42accf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.460812] env[63241]: DEBUG oslo_concurrency.lockutils [req-46def28d-1e96-42e3-a041-e089b70661d7 req-a5e1eb5d-4ff7-4d1b-bed5-7aeff39e10b0 service nova] Releasing lock "refresh_cache-f372d405-f7d5-4e5f-8c36-fe9651af2a0d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.582794] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d50245-2455-4e05-9912-964201ecef17 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.590572] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820685, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.593326] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af593ea7-3664-49d1-aae9-2b2b6df96c59 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.626652] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d37bf7-f624-4823-9ce8-62440c32e42c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.633836] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfcab35f-62c7-4c88-8c50-6ec81c4f9193 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.647568] env[63241]: DEBUG nova.compute.provider_tree [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1690.812945] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5283ce44-1901-22d8-08ac-058b8d73e22a, 'name': SearchDatastore_Task, 'duration_secs': 0.036344} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.813353] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.813646] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1690.813917] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.814101] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.814347] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1690.814645] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e308a96f-55f6-4b0c-a7dc-987e9fdec47e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.823840] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1690.824070] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1690.824916] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-580af548-3480-402e-a44e-a4106e172481 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.832268] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1690.832268] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5236a455-619d-0f7b-d066-c3f28eac6fcb" [ 1690.832268] env[63241]: _type = "Task" [ 1690.832268] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.840330] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5236a455-619d-0f7b-d066-c3f28eac6fcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.930567] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820686, 'name': ReconfigVM_Task, 'duration_secs': 0.474252} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.930712] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 1e172f73-972e-4401-b358-512f7e03b27f/1e172f73-972e-4401-b358-512f7e03b27f.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1690.933792] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e39616d0-8d19-4c00-b8e8-013a1314df90 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.941043] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1690.941043] env[63241]: value = "task-1820687" [ 1690.941043] env[63241]: _type = "Task" [ 1690.941043] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.948186] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820687, 'name': Rename_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.959999] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "refresh_cache-73ea6bff-60da-4691-a569-f4e9ae92f701" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.960167] env[63241]: DEBUG nova.compute.manager [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Instance network_info: |[{"id": "328d662e-d2e8-4f8a-94b4-dacebf42accf", "address": "fa:16:3e:3c:84:a8", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328d662e-d2", "ovs_interfaceid": "328d662e-d2e8-4f8a-94b4-dacebf42accf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1690.960570] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:84:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '328d662e-d2e8-4f8a-94b4-dacebf42accf', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1690.968656] env[63241]: DEBUG oslo.service.loopingcall [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1690.968656] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1690.968889] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e5259a4-9acc-42c3-8bd3-a39bd76ab529 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.987898] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1690.987898] env[63241]: value = "task-1820688" [ 1690.987898] env[63241]: _type = "Task" [ 1690.987898] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.995409] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820688, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.089621] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820685, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.099559] env[63241]: DEBUG nova.compute.manager [req-437a1598-fd96-4ff2-99eb-bc988d7d7192 req-62bc875b-e0ce-4283-9b96-495f40daf494 service nova] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Received event network-changed-328d662e-d2e8-4f8a-94b4-dacebf42accf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1691.099756] env[63241]: DEBUG nova.compute.manager [req-437a1598-fd96-4ff2-99eb-bc988d7d7192 req-62bc875b-e0ce-4283-9b96-495f40daf494 service nova] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Refreshing instance network info cache due to event network-changed-328d662e-d2e8-4f8a-94b4-dacebf42accf. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1691.099967] env[63241]: DEBUG oslo_concurrency.lockutils [req-437a1598-fd96-4ff2-99eb-bc988d7d7192 req-62bc875b-e0ce-4283-9b96-495f40daf494 service nova] Acquiring lock "refresh_cache-73ea6bff-60da-4691-a569-f4e9ae92f701" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.100120] env[63241]: DEBUG oslo_concurrency.lockutils [req-437a1598-fd96-4ff2-99eb-bc988d7d7192 req-62bc875b-e0ce-4283-9b96-495f40daf494 service nova] Acquired lock "refresh_cache-73ea6bff-60da-4691-a569-f4e9ae92f701" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.100308] env[63241]: DEBUG nova.network.neutron [req-437a1598-fd96-4ff2-99eb-bc988d7d7192 req-62bc875b-e0ce-4283-9b96-495f40daf494 service nova] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Refreshing network info cache for port 328d662e-d2e8-4f8a-94b4-dacebf42accf {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1691.130063] env[63241]: DEBUG nova.compute.manager [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1691.158289] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1691.158545] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1691.158690] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1691.158902] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1691.159043] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1691.159172] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1691.159375] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1691.159537] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
[ 1691.159702] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1691.159861] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1691.160046] env[63241]: DEBUG nova.virt.hardware [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1691.161210] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8e8099-52f0-4ae0-867a-18d41bf0de65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.166723] env[63241]: ERROR nova.scheduler.client.report [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [req-847ee8de-7562-4742-a1a5-de7c41029f9b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-847ee8de-7562-4742-a1a5-de7c41029f9b"}]} [ 1691.172104] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5025eef3-215a-4079-88b8-1e3690e14111 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.187178] env[63241]: DEBUG nova.scheduler.client.report [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1691.202884] env[63241]: DEBUG nova.scheduler.client.report [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1691.202884] env[63241]: DEBUG nova.compute.provider_tree [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1691.216074] env[63241]: DEBUG nova.scheduler.client.report [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1691.237830] env[63241]: DEBUG nova.scheduler.client.report [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1691.344937] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 
tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5236a455-619d-0f7b-d066-c3f28eac6fcb, 'name': SearchDatastore_Task, 'duration_secs': 0.01113} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.345860] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60d4510f-f6c4-4586-b864-39f9cf85f6f0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.356350] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1691.356350] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ac5983-23e7-4d3c-f24e-43bb23a8a209" [ 1691.356350] env[63241]: _type = "Task" [ 1691.356350] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.365160] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ac5983-23e7-4d3c-f24e-43bb23a8a209, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.454700] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820687, 'name': Rename_Task, 'duration_secs': 0.150358} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.454700] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1691.454700] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f575edbc-5937-4e93-a7d1-869d5af2c2f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.461462] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1691.461462] env[63241]: value = "task-1820689" [ 1691.461462] env[63241]: _type = "Task" [ 1691.461462] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.469425] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820689, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.486366] env[63241]: DEBUG nova.network.neutron [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Successfully updated port: eaf7faa9-8f94-4c74-9c0d-96c349efc7d7 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1691.502032] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820688, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.590239] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820685, 'name': CreateSnapshot_Task, 'duration_secs': 1.385243} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.590531] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1691.591276] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6037da0-f5e1-4334-bf22-92bf1a137cde {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.626614] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cad46ad-10ef-4ebd-b9ee-bc3314172244 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.634940] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f88710-1adf-4ee7-baea-a20024379577 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.668376] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca929850-f00d-4979-a1c3-5dd067362b32 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.676006] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f987c28-6ff0-4a53-a974-b077c4dabbfc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.691834] env[63241]: DEBUG nova.compute.provider_tree [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1691.735554] env[63241]: INFO nova.compute.manager [None req-d1e5cd57-2014-41af-ba9f-d58d1ba9d1c4 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Get console output [ 1691.735905] env[63241]: WARNING nova.virt.vmwareapi.driver [None req-d1e5cd57-2014-41af-ba9f-d58d1ba9d1c4 tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] The console log is missing. Check your VSPC configuration [ 1691.812119] env[63241]: DEBUG nova.network.neutron [req-437a1598-fd96-4ff2-99eb-bc988d7d7192 req-62bc875b-e0ce-4283-9b96-495f40daf494 service nova] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Updated VIF entry in instance network info cache for port 328d662e-d2e8-4f8a-94b4-dacebf42accf. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1691.812508] env[63241]: DEBUG nova.network.neutron [req-437a1598-fd96-4ff2-99eb-bc988d7d7192 req-62bc875b-e0ce-4283-9b96-495f40daf494 service nova] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Updating instance_info_cache with network_info: [{"id": "328d662e-d2e8-4f8a-94b4-dacebf42accf", "address": "fa:16:3e:3c:84:a8", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap328d662e-d2", "ovs_interfaceid": "328d662e-d2e8-4f8a-94b4-dacebf42accf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.866380] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ac5983-23e7-4d3c-f24e-43bb23a8a209, 'name': SearchDatastore_Task, 'duration_secs': 0.014386} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.866677] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.866945] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f372d405-f7d5-4e5f-8c36-fe9651af2a0d/f372d405-f7d5-4e5f-8c36-fe9651af2a0d.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1691.867243] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3eb58fd-d70b-4471-9109-0b96044777fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.874153] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1691.874153] env[63241]: value = "task-1820690" [ 1691.874153] env[63241]: _type = "Task" [ 1691.874153] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.882250] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.971837] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820689, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.988554] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "refresh_cache-a77f7227-0285-48b8-bb3b-f5cfe7ad4646" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.988705] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "refresh_cache-a77f7227-0285-48b8-bb3b-f5cfe7ad4646" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.988860] env[63241]: DEBUG nova.network.neutron [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1692.000247] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820688, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.115582] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1692.116088] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-86781682-5215-452c-b814-0b663d268fd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.125301] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1692.125301] env[63241]: value = "task-1820691" [ 1692.125301] env[63241]: _type = "Task" [ 1692.125301] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.133386] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820691, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.223570] env[63241]: DEBUG nova.scheduler.client.report [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 118 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1692.223946] env[63241]: DEBUG nova.compute.provider_tree [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 118 to 119 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1692.224199] env[63241]: DEBUG nova.compute.provider_tree [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1692.315141] env[63241]: DEBUG oslo_concurrency.lockutils [req-437a1598-fd96-4ff2-99eb-bc988d7d7192 req-62bc875b-e0ce-4283-9b96-495f40daf494 service nova] Releasing lock "refresh_cache-73ea6bff-60da-4691-a569-f4e9ae92f701" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.384285] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.473569] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820689, 'name': PowerOnVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.502271] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820688, 'name': CreateVM_Task, 'duration_secs': 1.512423} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.502465] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1692.503176] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1692.503363] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.503681] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1692.503941] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b88045f-c758-4892-9522-946f90f20761 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.508955] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1692.508955] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e4b6aa-7ec6-9044-6674-890a2be1d1fb" [ 1692.508955] env[63241]: _type = "Task" [ 1692.508955] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.518750] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e4b6aa-7ec6-9044-6674-890a2be1d1fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.532372] env[63241]: DEBUG nova.network.neutron [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1692.636729] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820691, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.692050] env[63241]: DEBUG nova.network.neutron [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Updating instance_info_cache with network_info: [{"id": "eaf7faa9-8f94-4c74-9c0d-96c349efc7d7", "address": "fa:16:3e:a2:66:39", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf7faa9-8f", "ovs_interfaceid": "eaf7faa9-8f94-4c74-9c0d-96c349efc7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.730280] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.634s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.730794] env[63241]: DEBUG nova.compute.manager [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1692.733682] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 19.802s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.733784] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.733911] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1692.734186] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 19.552s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.736636] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddea5fba-c0ac-4ddb-a0ab-97917a6594dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.746110] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc159e9-8b14-44df-b48b-dfd97651d95a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.761839] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f64a26-b121-4ffc-bcd8-f4a12c6addc0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.771261] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51cc59fe-8e3e-4c3e-8e86-88c1aa6f5673 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.801871] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178754MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1692.802055] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.842144] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] 
Acquiring lock "72a11582-1fad-428a-bde1-e9d0b05731cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.842293] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "72a11582-1fad-428a-bde1-e9d0b05731cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.842500] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquiring lock "72a11582-1fad-428a-bde1-e9d0b05731cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.842763] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "72a11582-1fad-428a-bde1-e9d0b05731cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.842943] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "72a11582-1fad-428a-bde1-e9d0b05731cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.845173] env[63241]: INFO nova.compute.manager [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Terminating instance [ 1692.847266] env[63241]: DEBUG nova.compute.manager [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1692.847462] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1692.848417] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23549296-e15e-4a78-a970-c06a625df68a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.857875] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1692.858634] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18c5900b-462f-4cd3-a2a0-e75ae204cce3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.866291] env[63241]: DEBUG oslo_vmware.api [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1692.866291] env[63241]: value = "task-1820692" [ 1692.866291] env[63241]: _type = "Task" [ 1692.866291] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.874919] env[63241]: DEBUG oslo_vmware.api [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820692, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.884132] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.998296} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.884650] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f372d405-f7d5-4e5f-8c36-fe9651af2a0d/f372d405-f7d5-4e5f-8c36-fe9651af2a0d.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1692.884891] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1692.885248] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78281c75-c600-41ca-b313-80ed412bbaf9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.894013] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1692.894013] env[63241]: value = "task-1820693" [ 1692.894013] env[63241]: _type = "Task" [ 1692.894013] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.903076] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820693, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.979813] env[63241]: DEBUG oslo_vmware.api [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820689, 'name': PowerOnVM_Task, 'duration_secs': 1.084593} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.980050] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1692.980261] env[63241]: INFO nova.compute.manager [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Took 9.60 seconds to spawn the instance on the hypervisor. 
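[editor's note] The 409 "placement.concurrent_update" at the top of this excerpt, followed by the inventory refresh and the resource provider generation moving from 118 to 119, is Placement's optimistic concurrency control in action: writers send the generation they last read, and a mismatch forces a re-read and retry. A minimal sketch of that read-generation / PUT / retry pattern is below; the endpoint and token are placeholders and this is not Nova's actual SchedulerReportClient code, just the protocol it follows.

import requests

PLACEMENT = "http://placement.example/placement"   # placeholder endpoint (assumption)
HEADERS = {
    "X-Auth-Token": "REPLACE_ME",                   # placeholder Keystone token (assumption)
    "OpenStack-API-Version": "placement 1.26",
    "Accept": "application/json",
}

def set_inventory(rp_uuid, inventories, attempts=3):
    """PUT a full inventory dict for a resource provider, re-reading the
    provider generation and retrying when Placement answers 409
    placement.concurrent_update (another writer bumped the generation,
    e.g. the 118 -> 119 step recorded in this log)."""
    for _ in range(attempts):
        # Read the provider's current generation.
        rp = requests.get(
            f"{PLACEMENT}/resource_providers/{rp_uuid}", headers=HEADERS)
        rp.raise_for_status()
        body = {
            "resource_provider_generation": rp.json()["generation"],
            "inventories": inventories,
        }
        # Attempt the write; a 409 means someone else updated the provider
        # since our read, so loop and try again with the fresh generation.
        resp = requests.put(
            f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()   # includes the new generation
    raise RuntimeError("gave up after repeated generation conflicts")

For example, the inventory dict passed in would look like the one logged above for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b ({'VCPU': {...}, 'MEMORY_MB': {...}, 'DISK_GB': {...}}).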
[ 1692.980456] env[63241]: DEBUG nova.compute.manager [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1692.981266] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ece5405-003d-4b34-afb9-9d12299b55df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.020126] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e4b6aa-7ec6-9044-6674-890a2be1d1fb, 'name': SearchDatastore_Task, 'duration_secs': 0.009395} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.020513] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.020824] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1693.021111] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.021329] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.021539] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1693.021871] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1641789-0cdc-48db-bc8f-8468566b5422 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.033350] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e 
tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1693.033604] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1693.034706] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eac52951-2079-492f-8c4b-bfde9fabd114 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.041063] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1693.041063] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526bd135-4f58-b59c-2318-63beabefac1f" [ 1693.041063] env[63241]: _type = "Task" [ 1693.041063] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.050782] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526bd135-4f58-b59c-2318-63beabefac1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.138412] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820691, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.144088] env[63241]: DEBUG nova.compute.manager [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Received event network-vif-plugged-eaf7faa9-8f94-4c74-9c0d-96c349efc7d7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1693.144438] env[63241]: DEBUG oslo_concurrency.lockutils [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] Acquiring lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.144700] env[63241]: DEBUG oslo_concurrency.lockutils [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] Lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.144734] env[63241]: DEBUG oslo_concurrency.lockutils [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] Lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.144885] env[63241]: DEBUG nova.compute.manager [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] No waiting events found dispatching network-vif-plugged-eaf7faa9-8f94-4c74-9c0d-96c349efc7d7 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1693.145066] env[63241]: WARNING nova.compute.manager [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Received unexpected event network-vif-plugged-eaf7faa9-8f94-4c74-9c0d-96c349efc7d7 for instance with vm_state building and task_state spawning. [ 1693.145231] env[63241]: DEBUG nova.compute.manager [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Received event network-changed-eaf7faa9-8f94-4c74-9c0d-96c349efc7d7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1693.145384] env[63241]: DEBUG nova.compute.manager [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Refreshing instance network info cache due to event network-changed-eaf7faa9-8f94-4c74-9c0d-96c349efc7d7. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1693.145548] env[63241]: DEBUG oslo_concurrency.lockutils [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] Acquiring lock "refresh_cache-a77f7227-0285-48b8-bb3b-f5cfe7ad4646" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.194755] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "refresh_cache-a77f7227-0285-48b8-bb3b-f5cfe7ad4646" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.195164] env[63241]: DEBUG nova.compute.manager [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Instance network_info: |[{"id": "eaf7faa9-8f94-4c74-9c0d-96c349efc7d7", "address": "fa:16:3e:a2:66:39", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf7faa9-8f", "ovs_interfaceid": "eaf7faa9-8f94-4c74-9c0d-96c349efc7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1693.196217] env[63241]: DEBUG oslo_concurrency.lockutils [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] Acquired lock "refresh_cache-a77f7227-0285-48b8-bb3b-f5cfe7ad4646" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.196217] env[63241]: DEBUG nova.network.neutron [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Refreshing network info cache for port eaf7faa9-8f94-4c74-9c0d-96c349efc7d7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1693.198119] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:66:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'eaf7faa9-8f94-4c74-9c0d-96c349efc7d7', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1693.205887] env[63241]: DEBUG oslo.service.loopingcall [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1693.206900] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1693.207151] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4be4251-22ce-49a0-934a-960852246200 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.227604] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1693.227604] env[63241]: value = "task-1820694" [ 1693.227604] env[63241]: _type = "Task" [ 1693.227604] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.235507] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820694, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.238049] env[63241]: DEBUG nova.compute.utils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1693.239023] env[63241]: DEBUG nova.compute.manager [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1693.239199] env[63241]: DEBUG nova.network.neutron [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1693.244123] env[63241]: INFO nova.compute.claims [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1693.291047] env[63241]: DEBUG nova.policy [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f52bf5741a490c83e01e06f686559e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c64d07a686b414f93ec4c599307498f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1693.376930] env[63241]: DEBUG oslo_vmware.api [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820692, 'name': PowerOffVM_Task, 'duration_secs': 0.193161} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.377228] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1693.377680] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1693.377680] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f119f7e7-16f4-48aa-a774-eecac466fc70 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.403903] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820693, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06529} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.404219] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1693.405137] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13abf4bd-5312-424b-9c38-6a867ad85717 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.430073] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] f372d405-f7d5-4e5f-8c36-fe9651af2a0d/f372d405-f7d5-4e5f-8c36-fe9651af2a0d.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1693.430346] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9671b810-006e-4536-9f78-e63f902790b4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.450159] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1693.450159] env[63241]: value = "task-1820696" [ 1693.450159] env[63241]: _type = "Task" [ 1693.450159] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.458271] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820696, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.504178] env[63241]: INFO nova.compute.manager [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Took 25.05 seconds to build instance. [ 1693.554466] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526bd135-4f58-b59c-2318-63beabefac1f, 'name': SearchDatastore_Task, 'duration_secs': 0.019857} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.555801] env[63241]: DEBUG nova.network.neutron [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Successfully created port: 92b2ae26-3653-4737-891e-09a99ee68a10 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1693.558505] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83f58074-bbd6-4c70-a230-67f514ce70c6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.565446] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1693.565446] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528d5bf4-2cd9-235f-dbdf-2cc3e583ebbd" [ 1693.565446] env[63241]: _type = "Task" [ 1693.565446] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.573810] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528d5bf4-2cd9-235f-dbdf-2cc3e583ebbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.637328] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820691, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.649340] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1693.649624] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1693.649931] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Deleting the datastore file [datastore1] 72a11582-1fad-428a-bde1-e9d0b05731cd {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1693.650234] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-645f92a2-4f24-4bf5-baec-8319e6944dc5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.656546] env[63241]: DEBUG oslo_vmware.api [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for the task: (returnval){ [ 1693.656546] env[63241]: value = "task-1820697" [ 1693.656546] env[63241]: _type = "Task" [ 1693.656546] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.665140] env[63241]: DEBUG oslo_vmware.api [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820697, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.738544] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820694, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.742290] env[63241]: DEBUG nova.compute.manager [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1693.752948] env[63241]: INFO nova.compute.resource_tracker [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating resource usage from migration a30c871a-6336-4949-86a2-c2009c56c7b0 [ 1693.965348] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820696, 'name': ReconfigVM_Task, 'duration_secs': 0.289508} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.965775] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Reconfigured VM instance instance-0000004f to attach disk [datastore1] f372d405-f7d5-4e5f-8c36-fe9651af2a0d/f372d405-f7d5-4e5f-8c36-fe9651af2a0d.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1693.966590] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d308624-50f1-418f-a36d-5fc392b2a2ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.976205] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1693.976205] env[63241]: value = "task-1820698" [ 1693.976205] env[63241]: _type = "Task" [ 1693.976205] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.983086] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820698, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.006730] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1fbc6944-067e-41a8-a39e-4852d605e9a6 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 26.558s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.078154] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528d5bf4-2cd9-235f-dbdf-2cc3e583ebbd, 'name': SearchDatastore_Task, 'duration_secs': 0.00935} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.078436] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.078753] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 73ea6bff-60da-4691-a569-f4e9ae92f701/73ea6bff-60da-4691-a569-f4e9ae92f701.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1694.079939] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0eec53e7-76d0-42b3-8407-38d57472279a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.083175] env[63241]: DEBUG nova.network.neutron [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Updated VIF entry in instance network info cache for port eaf7faa9-8f94-4c74-9c0d-96c349efc7d7. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1694.083661] env[63241]: DEBUG nova.network.neutron [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Updating instance_info_cache with network_info: [{"id": "eaf7faa9-8f94-4c74-9c0d-96c349efc7d7", "address": "fa:16:3e:a2:66:39", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf7faa9-8f", "ovs_interfaceid": "eaf7faa9-8f94-4c74-9c0d-96c349efc7d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.091293] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1694.091293] env[63241]: value = 
"task-1820699" [ 1694.091293] env[63241]: _type = "Task" [ 1694.091293] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.102892] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820699, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.140353] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820691, 'name': CloneVM_Task, 'duration_secs': 1.675895} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.143171] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Created linked-clone VM from snapshot [ 1694.144120] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d223d5-9681-40c9-972c-4aea80cb77d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.151903] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Uploading image 2349504f-9876-4d0a-8cdd-4551b70959b3 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1694.167659] env[63241]: DEBUG oslo_vmware.api [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Task: {'id': task-1820697, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150088} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.170390] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1694.171026] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1694.171026] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1694.171026] env[63241]: INFO nova.compute.manager [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1694.171261] env[63241]: DEBUG oslo.service.loopingcall [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1694.172751] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1694.172751] env[63241]: DEBUG nova.compute.manager [-] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1694.172751] env[63241]: DEBUG nova.network.neutron [-] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1694.176384] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e6b7d397-f0e1-46df-95f6-76364774a536 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.180670] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1694.180670] env[63241]: value = "task-1820700" [ 1694.180670] env[63241]: _type = "Task" [ 1694.180670] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.186455] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56bafec-959e-4d0c-acd9-7b5fd2d1d73f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.195778] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820700, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.198840] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfde9ffe-ccef-4d65-8b5c-830e995619be {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.237680] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603b88de-089a-4289-bfe6-3b558c9733e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.245757] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820694, 'name': CreateVM_Task, 'duration_secs': 0.926159} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.254555] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1694.255611] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.255797] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.256146] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1694.257809] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7581fd3b-3b1f-4128-9684-60b958fd99ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.262107] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee1aba20-780a-4be1-97ac-560dcb56919d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.274875] 
env[63241]: DEBUG nova.compute.provider_tree [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1694.277953] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1694.277953] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525395f6-c937-7452-6933-f69d875f1a74" [ 1694.277953] env[63241]: _type = "Task" [ 1694.277953] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.289460] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525395f6-c937-7452-6933-f69d875f1a74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.487970] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820698, 'name': Rename_Task, 'duration_secs': 0.290502} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.488251] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1694.488321] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c75271e2-d891-4b34-a86f-597c38613975 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.498060] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1694.498060] env[63241]: value = "task-1820701" [ 1694.498060] env[63241]: _type = "Task" [ 1694.498060] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.510009] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820701, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.588652] env[63241]: DEBUG oslo_concurrency.lockutils [req-5846101b-18ef-4b2b-b5c8-de98e6bd5f72 req-561d3c15-f0db-4bbe-bfeb-72aa81ff314d service nova] Releasing lock "refresh_cache-a77f7227-0285-48b8-bb3b-f5cfe7ad4646" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.602833] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820699, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.694740] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820700, 'name': Destroy_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.760408] env[63241]: DEBUG nova.compute.manager [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1694.795990] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525395f6-c937-7452-6933-f69d875f1a74, 'name': SearchDatastore_Task, 'duration_secs': 0.020224} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.798142] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1694.798386] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1694.798545] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1694.798732] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1694.798873] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1694.799034] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1694.799258] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1694.799421] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1694.799612] env[63241]: DEBUG 
nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1694.799781] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1694.799951] env[63241]: DEBUG nova.virt.hardware [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1694.800474] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.800661] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1694.800894] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.801046] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.801248] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1694.802588] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9688e6c8-d422-48f6-979d-d94ba2fc01da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.804678] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d4a3208-9de3-472c-8e00-6650fc857486 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.816218] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f585d80-bb9c-4704-a987-c7db9198e239 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.820120] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1694.820120] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1694.821701] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3c59a8f-a57f-4548-9985-a7a2c12c33f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.824135] env[63241]: DEBUG nova.scheduler.client.report [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1694.824365] env[63241]: DEBUG nova.compute.provider_tree [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 119 to 120 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1694.824539] env[63241]: DEBUG nova.compute.provider_tree [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1694.839836] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1694.839836] env[63241]: value = 
"session[52622af6-969a-a161-ff87-4f4559b12465]52a45bf4-1333-9496-be3b-b90a0ddcf7e0" [ 1694.839836] env[63241]: _type = "Task" [ 1694.839836] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.848229] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a45bf4-1333-9496-be3b-b90a0ddcf7e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.922545] env[63241]: DEBUG nova.compute.manager [req-49b0582a-b56c-427b-ab52-c1346ff74853 req-7c06334b-7562-4cb7-93ab-a38ba23c6fbb service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Received event network-vif-deleted-bd89b471-e155-45fc-9b21-40bc75f8f48a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1694.922745] env[63241]: INFO nova.compute.manager [req-49b0582a-b56c-427b-ab52-c1346ff74853 req-7c06334b-7562-4cb7-93ab-a38ba23c6fbb service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Neutron deleted interface bd89b471-e155-45fc-9b21-40bc75f8f48a; detaching it from the instance and deleting it from the info cache [ 1694.922916] env[63241]: DEBUG nova.network.neutron [req-49b0582a-b56c-427b-ab52-c1346ff74853 req-7c06334b-7562-4cb7-93ab-a38ba23c6fbb service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.007653] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820701, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.084921] env[63241]: DEBUG nova.network.neutron [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Successfully updated port: 92b2ae26-3653-4737-891e-09a99ee68a10 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1695.104482] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820699, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.693158} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.104884] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 73ea6bff-60da-4691-a569-f4e9ae92f701/73ea6bff-60da-4691-a569-f4e9ae92f701.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1695.105027] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1695.105280] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-512834c8-02af-407f-b073-96c997194918 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.112029] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1695.112029] env[63241]: value = "task-1820702" [ 1695.112029] env[63241]: _type = "Task" [ 1695.112029] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.124216] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820702, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.194441] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820700, 'name': Destroy_Task, 'duration_secs': 0.639141} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.197697] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Destroyed the VM [ 1695.197697] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1695.197697] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c6912521-71ea-4558-864c-47a6da3c844d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.202848] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1695.202848] env[63241]: value = "task-1820703" [ 1695.202848] env[63241]: _type = "Task" [ 1695.202848] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.211723] env[63241]: DEBUG nova.compute.manager [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Received event network-changed-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1695.211823] env[63241]: DEBUG nova.compute.manager [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Refreshing instance network info cache due to event network-changed-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1695.212131] env[63241]: DEBUG oslo_concurrency.lockutils [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] Acquiring lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.212379] env[63241]: DEBUG oslo_concurrency.lockutils [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] Acquired lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.212708] env[63241]: DEBUG nova.network.neutron [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Refreshing network info cache for port fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1695.221568] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820703, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.337419] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.603s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.337614] env[63241]: INFO nova.compute.manager [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Migrating [ 1695.346347] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.921s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.347991] env[63241]: INFO nova.compute.claims [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1695.369540] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a45bf4-1333-9496-be3b-b90a0ddcf7e0, 'name': SearchDatastore_Task, 'duration_secs': 0.009977} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.370425] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a424cb7e-72e4-4aa0-a154-29d42c61bf4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.376337] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1695.376337] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5281071d-580e-f959-0ba1-0321c8a1d23b" [ 1695.376337] env[63241]: _type = "Task" [ 1695.376337] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.385784] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5281071d-580e-f959-0ba1-0321c8a1d23b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.396110] env[63241]: DEBUG nova.network.neutron [-] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.426505] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63367344-6529-442b-b67d-19632d1c0a1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.436158] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b18b209-25f0-4c61-a923-1122b4ca3671 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.477726] env[63241]: DEBUG nova.compute.manager [req-49b0582a-b56c-427b-ab52-c1346ff74853 req-7c06334b-7562-4cb7-93ab-a38ba23c6fbb service nova] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Detach interface failed, port_id=bd89b471-e155-45fc-9b21-40bc75f8f48a, reason: Instance 72a11582-1fad-428a-bde1-e9d0b05731cd could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1695.510873] env[63241]: DEBUG oslo_vmware.api [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820701, 'name': PowerOnVM_Task, 'duration_secs': 0.711823} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.512019] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1695.512019] env[63241]: INFO nova.compute.manager [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Took 9.48 seconds to spawn the instance on the hypervisor. 
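
[editorial aside] The pattern that dominates these entries is the oslo.vmware task-polling loop: each "Waiting for the task: (returnval){ ... }" block is emitted from wait_for_task (oslo_vmware/api.py:397), and the following "Task: {...} progress is N%." and "... 'duration_secs': ...} completed successfully." records come from _poll_task (api.py:434/444) as the vCenter task is polled until it finishes. The sketch below is a minimal, standard-library illustration of that poll-until-done shape, not the oslo.vmware implementation; TaskInfo and fetch_task_info are hypothetical placeholders for the real vCenter task lookup.

import logging
import time
from dataclasses import dataclass

LOG = logging.getLogger(__name__)


@dataclass
class TaskInfo:
    # Hypothetical stand-in for the task data a vCenter-like backend would return.
    task_id: str
    name: str
    state: str        # "running", "success" or "error"
    progress: int     # 0-100
    error: str | None = None


def wait_for_task(task_id, fetch_task_info, poll_interval=0.5):
    """Poll a task until it leaves the running state, logging progress
    in the same shape as the log entries above."""
    started = time.monotonic()
    while True:
        info = fetch_task_info(task_id)  # hypothetical callable querying the backend
        if info.state == "running":
            LOG.debug("Task: {'id': %s, 'name': %s} progress is %d%%.",
                      info.task_id, info.name, info.progress)
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - started
        if info.state == "success":
            LOG.debug("Task: {'id': %s, 'name': %s, 'duration_secs': %f} "
                      "completed successfully.", info.task_id, info.name, duration)
            return info
        raise RuntimeError(f"Task {info.task_id} ({info.name}) failed: {info.error}")

This matches what the log shows: running tasks are reported only with a progress percentage, while duration_secs appears exactly once, when the task completes and polling stops.
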
[ 1695.512244] env[63241]: DEBUG nova.compute.manager [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1695.513558] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3b2cae-0b08-4fd2-a707-7a216b91cd6c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.586964] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.588749] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.588749] env[63241]: DEBUG nova.network.neutron [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1695.628292] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069447} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.629453] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1695.631099] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402564c7-c5ea-4735-a2bc-86db5fccaf85 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.655493] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 73ea6bff-60da-4691-a569-f4e9ae92f701/73ea6bff-60da-4691-a569-f4e9ae92f701.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1695.655838] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf9b9939-da83-43eb-bccc-c11e66628e92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.675906] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1695.675906] env[63241]: value = "task-1820704" [ 1695.675906] env[63241]: _type = "Task" [ 1695.675906] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.684906] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820704, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.712231] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820703, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.817994] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.818387] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.818652] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.818844] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.819028] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.820994] env[63241]: INFO nova.compute.manager [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Terminating instance [ 1695.822702] env[63241]: DEBUG nova.compute.manager [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1695.822889] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1695.823708] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52158e15-6d8f-4c87-88eb-f5b0073bee06 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.831227] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1695.831227] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28796b21-b610-4ec9-a2e7-2acb0631a16b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.837886] env[63241]: DEBUG oslo_vmware.api [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1695.837886] env[63241]: value = "task-1820705" [ 1695.837886] env[63241]: _type = "Task" [ 1695.837886] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.847646] env[63241]: DEBUG oslo_vmware.api [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820705, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.863705] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1695.863908] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1695.864075] env[63241]: DEBUG nova.network.neutron [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1695.887488] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5281071d-580e-f959-0ba1-0321c8a1d23b, 'name': SearchDatastore_Task, 'duration_secs': 0.014869} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.887885] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.888248] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a77f7227-0285-48b8-bb3b-f5cfe7ad4646/a77f7227-0285-48b8-bb3b-f5cfe7ad4646.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1695.888518] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a742645-4dd0-4e89-ab33-a297139188e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.897281] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1695.897281] env[63241]: value = "task-1820706" [ 1695.897281] env[63241]: _type = "Task" [ 1695.897281] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.902602] env[63241]: INFO nova.compute.manager [-] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Took 1.73 seconds to deallocate network for instance. [ 1695.910317] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.960793] env[63241]: DEBUG nova.network.neutron [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updated VIF entry in instance network info cache for port fbeb829e-4c31-429b-bdb0-ecb7331ef4ea. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1695.961305] env[63241]: DEBUG nova.network.neutron [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating instance_info_cache with network_info: [{"id": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "address": "fa:16:3e:17:1d:18", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbeb829e-4c", "ovs_interfaceid": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.035638] env[63241]: INFO nova.compute.manager [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Took 27.04 seconds to build instance. [ 1696.129194] env[63241]: DEBUG nova.network.neutron [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1696.188337] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820704, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.217421] env[63241]: DEBUG oslo_vmware.api [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820703, 'name': RemoveSnapshot_Task, 'duration_secs': 0.805969} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.217653] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1696.301677] env[63241]: DEBUG nova.network.neutron [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Updating instance_info_cache with network_info: [{"id": "92b2ae26-3653-4737-891e-09a99ee68a10", "address": "fa:16:3e:7e:66:33", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2ae26-36", "ovs_interfaceid": "92b2ae26-3653-4737-891e-09a99ee68a10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.348815] env[63241]: DEBUG oslo_vmware.api [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820705, 'name': PowerOffVM_Task, 'duration_secs': 0.394613} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.349154] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1696.349358] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1696.349636] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb9275cc-f850-4622-af57-b3163f5f8d5d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.412126] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.415598] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820706, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.463791] env[63241]: DEBUG oslo_concurrency.lockutils [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] Releasing lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.464281] env[63241]: DEBUG nova.compute.manager [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Received event network-vif-plugged-92b2ae26-3653-4737-891e-09a99ee68a10 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1696.464281] env[63241]: DEBUG oslo_concurrency.lockutils [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] Acquiring lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.464476] env[63241]: DEBUG oslo_concurrency.lockutils [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.464681] env[63241]: DEBUG oslo_concurrency.lockutils [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.464964] env[63241]: DEBUG nova.compute.manager [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] No waiting events found dispatching network-vif-plugged-92b2ae26-3653-4737-891e-09a99ee68a10 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1696.465171] env[63241]: WARNING nova.compute.manager [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Received unexpected event network-vif-plugged-92b2ae26-3653-4737-891e-09a99ee68a10 for instance with vm_state building and task_state spawning. [ 1696.465351] env[63241]: DEBUG nova.compute.manager [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Received event network-changed-92b2ae26-3653-4737-891e-09a99ee68a10 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1696.465501] env[63241]: DEBUG nova.compute.manager [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Refreshing instance network info cache due to event network-changed-92b2ae26-3653-4737-891e-09a99ee68a10. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1696.465724] env[63241]: DEBUG oslo_concurrency.lockutils [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] Acquiring lock "refresh_cache-44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.536846] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f720b47a-b2e5-40e3-928b-4728f5eac0bc tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.553s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.562032] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1696.562032] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1696.562187] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Deleting the datastore file [datastore1] c7b034f7-1d7f-4782-9ecb-5987c35339cc {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1696.562432] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6333baa4-a3cb-4bda-9709-06bb6ec8add6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.568961] env[63241]: DEBUG oslo_vmware.api [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for the task: (returnval){ [ 1696.568961] env[63241]: value = "task-1820708" [ 1696.568961] env[63241]: _type = "Task" [ 1696.568961] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.580068] env[63241]: DEBUG oslo_vmware.api [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820708, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.623829] env[63241]: DEBUG nova.network.neutron [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance_info_cache with network_info: [{"id": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "address": "fa:16:3e:72:df:ac", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f1b482-fc", "ovs_interfaceid": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.686663] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820704, 'name': ReconfigVM_Task, 'duration_secs': 0.628985} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.689218] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 73ea6bff-60da-4691-a569-f4e9ae92f701/73ea6bff-60da-4691-a569-f4e9ae92f701.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1696.690168] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f685cb3-332f-412d-a79a-174986023419 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.696020] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1696.696020] env[63241]: value = "task-1820709" [ 1696.696020] env[63241]: _type = "Task" [ 1696.696020] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.708015] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820709, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.724390] env[63241]: WARNING nova.compute.manager [None req-1c929128-27f2-41c3-ae61-308858531e2a tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Image not found during snapshot: nova.exception.ImageNotFound: Image 2349504f-9876-4d0a-8cdd-4551b70959b3 could not be found. [ 1696.786018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483bd22e-a7f1-4cac-871f-fd0af2c7866c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.792983] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fe0dff-ce9d-4cb0-97cf-ce121f59ef71 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.828055] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.828353] env[63241]: DEBUG nova.compute.manager [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Instance network_info: |[{"id": "92b2ae26-3653-4737-891e-09a99ee68a10", "address": "fa:16:3e:7e:66:33", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2ae26-36", "ovs_interfaceid": "92b2ae26-3653-4737-891e-09a99ee68a10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1696.829257] env[63241]: DEBUG oslo_concurrency.lockutils [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] Acquired lock "refresh_cache-44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.829450] env[63241]: DEBUG nova.network.neutron [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Refreshing network info cache for port 92b2ae26-3653-4737-891e-09a99ee68a10 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1696.830590] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:66:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dacd109c-2442-41b8-b612-7ed3efbdaa94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92b2ae26-3653-4737-891e-09a99ee68a10', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1696.838321] env[63241]: DEBUG oslo.service.loopingcall [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1696.839023] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e9c40c-2a49-4070-a744-e1383c8cf163 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.842496] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1696.842713] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf8b4b96-a3cf-4d35-ae83-a7f67ddb7c26 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.863068] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf84126-d186-46c4-acbd-362f7cae9799 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.868089] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1696.868089] env[63241]: value = "task-1820710" [ 1696.868089] env[63241]: _type = "Task" [ 1696.868089] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.880281] env[63241]: DEBUG nova.compute.provider_tree [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1696.888925] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.907612] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820706, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.717436} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.907865] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] a77f7227-0285-48b8-bb3b-f5cfe7ad4646/a77f7227-0285-48b8-bb3b-f5cfe7ad4646.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1696.908085] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1696.908340] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c7182c6-4ae6-4c08-9569-42d1ce61937e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.914223] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1696.914223] env[63241]: value = "task-1820711" [ 1696.914223] env[63241]: _type = "Task" [ 1696.914223] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.925049] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820711, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.081661] env[63241]: DEBUG oslo_vmware.api [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820708, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.126490] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.207444] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820709, 'name': Rename_Task, 'duration_secs': 0.432126} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.207807] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1697.208122] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf00c7e9-c9de-404b-be36-a3bc8f9d5105 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.214182] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1697.214182] env[63241]: value = "task-1820712" [ 1697.214182] env[63241]: _type = "Task" [ 1697.214182] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.222462] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820712, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.381965] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.412868] env[63241]: DEBUG nova.scheduler.client.report [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1697.413159] env[63241]: DEBUG nova.compute.provider_tree [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 120 to 121 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1697.413370] env[63241]: DEBUG nova.compute.provider_tree [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1697.432857] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820711, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.182356} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.434149] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1697.434286] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0481f12-8062-4148-bb2e-9e0cfbca8ca5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.470289] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] a77f7227-0285-48b8-bb3b-f5cfe7ad4646/a77f7227-0285-48b8-bb3b-f5cfe7ad4646.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1697.472722] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd51a8ff-fa37-40ad-a22a-809257ffd301 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.501260] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1697.501260] env[63241]: value = "task-1820713" [ 1697.501260] env[63241]: _type = "Task" [ 1697.501260] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.514191] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820713, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.582531] env[63241]: DEBUG oslo_vmware.api [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Task: {'id': task-1820708, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.841353} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.582819] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1697.583053] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1697.583288] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1697.583484] env[63241]: INFO nova.compute.manager [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Took 1.76 seconds to destroy the instance on the hypervisor. [ 1697.583755] env[63241]: DEBUG oslo.service.loopingcall [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1697.586680] env[63241]: DEBUG nova.compute.manager [-] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1697.586795] env[63241]: DEBUG nova.network.neutron [-] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1697.659660] env[63241]: DEBUG nova.network.neutron [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Updated VIF entry in instance network info cache for port 92b2ae26-3653-4737-891e-09a99ee68a10. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1697.660030] env[63241]: DEBUG nova.network.neutron [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Updating instance_info_cache with network_info: [{"id": "92b2ae26-3653-4737-891e-09a99ee68a10", "address": "fa:16:3e:7e:66:33", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b2ae26-36", "ovs_interfaceid": "92b2ae26-3653-4737-891e-09a99ee68a10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.724582] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820712, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.880798] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.927015] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.927423] env[63241]: DEBUG nova.compute.manager [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1697.930039] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.535s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.934020] env[63241]: DEBUG nova.objects.instance [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1698.012856] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820713, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.025147] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "c8f1ce16-70b7-41fd-8516-63198139c1cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.025426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "c8f1ce16-70b7-41fd-8516-63198139c1cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.025647] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "c8f1ce16-70b7-41fd-8516-63198139c1cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.025859] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "c8f1ce16-70b7-41fd-8516-63198139c1cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.026065] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "c8f1ce16-70b7-41fd-8516-63198139c1cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.029347] env[63241]: INFO 
nova.compute.manager [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Terminating instance [ 1698.032204] env[63241]: DEBUG nova.compute.manager [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1698.032568] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1698.034880] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56011d5f-f241-4f62-af03-1cd7631400de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.043666] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1698.043959] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c3b4e7e-0045-4681-b062-9197773f5cf6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.051692] env[63241]: DEBUG oslo_vmware.api [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1698.051692] env[63241]: value = "task-1820714" [ 1698.051692] env[63241]: _type = "Task" [ 1698.051692] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.063533] env[63241]: DEBUG oslo_vmware.api [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820714, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.165060] env[63241]: DEBUG oslo_concurrency.lockutils [req-748c7706-b3d7-404b-bc64-3fa016088fdd req-b1b194f9-6d90-4a42-8ba5-118505bf3d7c service nova] Releasing lock "refresh_cache-44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.232022] env[63241]: DEBUG oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820712, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.380505] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.434644] env[63241]: DEBUG nova.compute.utils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1698.440024] env[63241]: DEBUG nova.compute.manager [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1698.440024] env[63241]: DEBUG nova.network.neutron [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1698.493455] env[63241]: DEBUG nova.policy [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78657a2bc34d4bb9922678ed287530f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18094134f49b4e84b83e97631bc22903', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1698.513713] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820713, 'name': ReconfigVM_Task, 'duration_secs': 0.778114} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.514492] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Reconfigured VM instance instance-00000051 to attach disk [datastore1] a77f7227-0285-48b8-bb3b-f5cfe7ad4646/a77f7227-0285-48b8-bb3b-f5cfe7ad4646.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1698.515137] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1682bc78-5e5b-4739-840e-bd46e5841e8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.523531] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1698.523531] env[63241]: value = "task-1820715" [ 1698.523531] env[63241]: _type = "Task" [ 1698.523531] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.535983] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820715, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.562762] env[63241]: DEBUG oslo_vmware.api [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820714, 'name': PowerOffVM_Task, 'duration_secs': 0.194439} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.564314] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1698.564314] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1698.565574] env[63241]: DEBUG nova.compute.manager [req-dc11f6c7-5e02-41c9-b2d3-d4ec3d677931 req-d8220574-85ff-4291-8108-d4ec5264fc67 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Received event network-vif-deleted-dcdf6593-f699-4bf0-8fa5-16a49caabae8 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1698.565769] env[63241]: INFO nova.compute.manager [req-dc11f6c7-5e02-41c9-b2d3-d4ec3d677931 req-d8220574-85ff-4291-8108-d4ec5264fc67 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Neutron deleted interface dcdf6593-f699-4bf0-8fa5-16a49caabae8; detaching it from the instance and deleting it from the info cache [ 1698.565940] env[63241]: DEBUG nova.network.neutron [req-dc11f6c7-5e02-41c9-b2d3-d4ec3d677931 req-d8220574-85ff-4291-8108-d4ec5264fc67 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.566987] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-168e3554-bd4b-414c-8225-8e18e5ad97fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.643529] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38eb95bb-3e18-4742-91b1-4d42f7406c1b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.662682] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance '7f1710d0-857d-41fc-8151-8c5e129dda08' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1698.729155] env[63241]: DEBUG 
oslo_vmware.api [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820712, 'name': PowerOnVM_Task, 'duration_secs': 1.211902} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.729485] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1698.729697] env[63241]: INFO nova.compute.manager [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Took 10.13 seconds to spawn the instance on the hypervisor. [ 1698.729878] env[63241]: DEBUG nova.compute.manager [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1698.730903] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f210d89-9ce6-4bba-90c6-9142ffc068fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.762672] env[63241]: DEBUG nova.network.neutron [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Successfully created port: 43c2edfc-733f-41ab-8cd3-c132dd83c038 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1698.882868] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.940307] env[63241]: DEBUG nova.compute.manager [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1698.944802] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f6ca4e84-46ba-48be-a505-502e4310dcb1 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.945543] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.406s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.947322] env[63241]: INFO nova.compute.claims [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1699.033144] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820715, 'name': Rename_Task, 'duration_secs': 0.485149} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.033611] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1699.037020] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-175d94eb-94aa-4a40-96af-7d07a029ba2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.040621] env[63241]: DEBUG nova.network.neutron [-] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.041819] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1699.041819] env[63241]: value = "task-1820717" [ 1699.041819] env[63241]: _type = "Task" [ 1699.041819] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.050574] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820717, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.070730] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9bf2a6c-85d7-485e-8dd6-503e302738d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.082107] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3c2b91-2d30-4aff-bfb0-ca67cc8b0030 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.128605] env[63241]: DEBUG nova.compute.manager [req-dc11f6c7-5e02-41c9-b2d3-d4ec3d677931 req-d8220574-85ff-4291-8108-d4ec5264fc67 service nova] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Detach interface failed, port_id=dcdf6593-f699-4bf0-8fa5-16a49caabae8, reason: Instance c7b034f7-1d7f-4782-9ecb-5987c35339cc could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1699.169573] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1699.169573] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5746fa08-0b7d-46ab-a5a2-2f58a108cc8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.176387] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1699.176387] env[63241]: value = "task-1820718" [ 1699.176387] env[63241]: _type = "Task" [ 1699.176387] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.185957] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820718, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.253046] env[63241]: INFO nova.compute.manager [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Took 29.85 seconds to build instance. [ 1699.382683] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.544080] env[63241]: INFO nova.compute.manager [-] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Took 1.96 seconds to deallocate network for instance. [ 1699.559862] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820717, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.686878] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820718, 'name': PowerOffVM_Task, 'duration_secs': 0.289559} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.687184] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1699.687373] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance '7f1710d0-857d-41fc-8151-8c5e129dda08' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1699.755337] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f56b8dc-8bde-48f1-ac7e-d62500f9168e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "73ea6bff-60da-4691-a569-f4e9ae92f701" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.369s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.882291] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.951191] env[63241]: DEBUG nova.compute.manager [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1699.979861] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1699.980131] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1699.980291] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1699.980473] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1699.980763] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1699.980932] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1699.981165] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1699.981332] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1699.981530] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1699.981700] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1699.981876] env[63241]: DEBUG nova.virt.hardware [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1699.982896] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60f422c-5758-45ce-bc17-78121185d95a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.993685] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb10eca-8c25-4bf4-9547-7d8e39b7a8eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.052574] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.052904] env[63241]: DEBUG oslo_vmware.api [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820717, 'name': PowerOnVM_Task, 'duration_secs': 0.63525} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.055388] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1700.055604] env[63241]: INFO nova.compute.manager [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Took 8.93 seconds to spawn the instance on the hypervisor. 
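The power-on sequence above (Rename_Task completing, PowerOnVM_Task submitted via vm_util.power_on_instance, the "Waiting for the task: (returnval){...}" record, and the repeated "progress is N%" polls ending in "completed successfully") is the standard oslo.vmware task flow: submit an asynchronous vCenter task through the API session, then poll it until vCenter reports success or failure. Below is a minimal sketch of that pattern against the public oslo.vmware API; the host, credentials, and pre-resolved vm_ref are placeholders, and this illustrates the polling flow only, not Nova's exact code path.

# Minimal sketch (placeholder host/credentials/vm_ref; not Nova's exact code
# path) of the oslo.vmware submit-and-poll pattern visible in the log above.
from oslo_vmware import api as vmware_api


def power_on_vm(host, user, password, vm_ref):
    # Mirrors the session creation logged earlier: retry failed API calls
    # up to 10 times, poll outstanding tasks every 0.5 seconds.
    session = vmware_api.VMwareAPISession(
        host, user, password, api_retry_count=10, task_poll_interval=0.5)
    try:
        # Submit the asynchronous vCenter task. Rename_Task, ReconfigVM_Task
        # and CreateVM_Task in the surrounding entries follow the same shape.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Block until the task finishes; each poll corresponds to one
        # "Task: {'id': ..., 'name': PowerOnVM_Task} progress is N%" line,
        # and completion to the "completed successfully" entry.
        session.wait_for_task(task)
    finally:
        session.logout()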
[ 1700.055784] env[63241]: DEBUG nova.compute.manager [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1700.056998] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f83157-1edf-42da-a88d-804897cb26c7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.194962] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1700.194962] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1700.195294] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1700.195539] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1700.197030] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1700.197030] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1700.197030] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1700.197030] env[63241]: DEBUG nova.virt.hardware 
[None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1700.197030] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1700.197030] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1700.197271] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1700.203644] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a70d84cc-082c-4cb0-a14e-3d5895a2baa4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.225867] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1700.225867] env[63241]: value = "task-1820719" [ 1700.225867] env[63241]: _type = "Task" [ 1700.225867] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.236683] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820719, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.385159] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.400856] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f2a302-4d52-47bb-b6b4-1679eaf58b14 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.408081] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88ecd72-b555-4c3b-910f-dd0df5dc1c46 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.442737] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261b8948-91d6-461b-8f71-e3614e76f1ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.448967] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05afd5b3-340b-41f0-b9ff-b1df42842bfe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.463533] env[63241]: DEBUG nova.compute.provider_tree [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1700.577508] env[63241]: INFO nova.compute.manager [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Took 30.64 seconds to build instance. [ 1700.736256] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820719, 'name': ReconfigVM_Task, 'duration_secs': 0.178375} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.736565] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance '7f1710d0-857d-41fc-8151-8c5e129dda08' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1700.886636] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.967233] env[63241]: DEBUG nova.scheduler.client.report [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1701.079675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-720f716d-d5eb-45f8-97c1-cf7c10b3573e tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.152s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.244270] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:28:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='6d3f31be-51b1-4783-a8b9-92005f2fb457',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-13492411',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1701.244270] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1701.244270] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.244270] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1701.244270] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.244270] env[63241]: DEBUG 
nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1701.244270] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1701.244270] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1701.244555] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1701.244676] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1701.244801] env[63241]: DEBUG nova.virt.hardware [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1701.251056] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfiguring VM instance instance-00000048 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1701.251457] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e3749f2-de3d-4f5b-8dc9-24249c4ef9f4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.272937] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1701.272937] env[63241]: value = "task-1820720" [ 1701.272937] env[63241]: _type = "Task" [ 1701.272937] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.281887] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820720, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.386456] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.473597] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.474256] env[63241]: DEBUG nova.compute.manager [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1701.477611] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.876s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.477867] env[63241]: DEBUG nova.objects.instance [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lazy-loading 'resources' on Instance uuid 4a57d04b-72a0-4db3-8119-994b67e4b096 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1701.782801] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820720, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.888945] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.983467] env[63241]: DEBUG nova.compute.utils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1701.984956] env[63241]: DEBUG nova.compute.manager [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1701.985064] env[63241]: DEBUG nova.network.neutron [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1702.080752] env[63241]: DEBUG nova.policy [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '048a059c2e9a4feb82745da2b6959038', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e7a8a40a56ee42dca4190ac78e5f22ef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1702.290912] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820720, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.391283] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.407208] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.407400] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.407580] env[63241]: DEBUG nova.compute.manager [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1702.408602] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ae18a0-0400-4fdd-aa85-609633870418 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.414900] env[63241]: DEBUG nova.compute.manager [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 
f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1702.415890] env[63241]: DEBUG nova.objects.instance [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lazy-loading 'flavor' on Instance uuid f372d405-f7d5-4e5f-8c36-fe9651af2a0d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1702.438874] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c14a35-922a-4028-981d-e89afba6bf92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.447296] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57041012-b98e-4006-a076-4d1195f5f979 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.480164] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b953c3-2872-48f5-ba5d-577abe4db5d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.487902] env[63241]: DEBUG nova.compute.manager [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1702.494427] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4461900-c5a9-4d5c-a2c0-ab849eac4927 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.516218] env[63241]: DEBUG nova.compute.provider_tree [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1702.528908] env[63241]: DEBUG nova.network.neutron [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Successfully created port: aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1702.783798] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820720, 'name': ReconfigVM_Task, 'duration_secs': 1.464808} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.786271] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfigured VM instance instance-00000048 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1702.786271] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5572566-a78b-4c33-a509-fa8eb46c6c9d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.811470] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 7f1710d0-857d-41fc-8151-8c5e129dda08/7f1710d0-857d-41fc-8151-8c5e129dda08.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1702.811873] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e4f53da-5277-4845-9761-455a903f51e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.830301] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1702.830301] env[63241]: value = "task-1820721" [ 1702.830301] env[63241]: _type = "Task" [ 1702.830301] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.842345] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820721, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.891109] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.923669] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1702.923931] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e816005a-4f78-4d27-af82-9a0437bd78e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.931925] env[63241]: DEBUG oslo_vmware.api [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1702.931925] env[63241]: value = "task-1820722" [ 1702.931925] env[63241]: _type = "Task" [ 1702.931925] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.940455] env[63241]: DEBUG oslo_vmware.api [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820722, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.019517] env[63241]: DEBUG nova.scheduler.client.report [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1703.340463] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820721, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.390265] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.441667] env[63241]: DEBUG oslo_vmware.api [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820722, 'name': PowerOffVM_Task, 'duration_secs': 0.208216} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.442191] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1703.443899] env[63241]: DEBUG nova.compute.manager [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1703.445019] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf67d01-f0e4-436f-a13d-3f8ed702dd66 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.512422] env[63241]: DEBUG nova.compute.manager [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1703.525244] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.048s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.527744] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.124s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.529430] env[63241]: INFO nova.compute.claims [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1703.546105] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1703.546388] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1703.546544] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1703.546725] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1703.546869] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1703.547238] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1703.547537] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1703.547717] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1703.547905] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1703.548085] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] 
{{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1703.548372] env[63241]: DEBUG nova.virt.hardware [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1703.549436] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9fb06b-e7ce-48f8-946e-5a5a0bfab39e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.560525] env[63241]: INFO nova.scheduler.client.report [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Deleted allocations for instance 4a57d04b-72a0-4db3-8119-994b67e4b096 [ 1703.562450] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c3f437-6b07-4aa6-85b8-0630cd8dfb06 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.842828] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820721, 'name': ReconfigVM_Task, 'duration_secs': 0.566169} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.842828] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 7f1710d0-857d-41fc-8151-8c5e129dda08/7f1710d0-857d-41fc-8151-8c5e129dda08.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1703.842828] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance '7f1710d0-857d-41fc-8151-8c5e129dda08' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1703.892155] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.904490] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1703.904490] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1703.904600] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleting the datastore file [datastore1] c8f1ce16-70b7-41fd-8516-63198139c1cc {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1703.905193] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-674fd0b3-c3fe-41bd-b010-215248b0326f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.912159] env[63241]: DEBUG oslo_vmware.api [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for the task: (returnval){ [ 1703.912159] env[63241]: value = "task-1820723" [ 1703.912159] env[63241]: _type = "Task" [ 1703.912159] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.921193] env[63241]: DEBUG oslo_vmware.api [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820723, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.961687] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1c6255c8-3d28-4b3f-b530-81d2a9d4cc8f tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.554s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.073329] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0357123f-6472-41e6-b6d3-351ff1267bcc tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "4a57d04b-72a0-4db3-8119-994b67e4b096" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.198s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.348738] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc6f08a-33df-448e-8dcf-e872a1ad90d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.371165] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b04234d-49a8-412a-aad7-1e4f17803cb8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.390647] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance '7f1710d0-857d-41fc-8151-8c5e129dda08' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1704.403052] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820710, 'name': CreateVM_Task, 'duration_secs': 7.325947} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.403232] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1704.403921] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.404089] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.404402] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1704.404649] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ab76a0f-721f-4d42-9d6b-86980747838b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.408966] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1704.408966] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5255c475-24b0-e200-a2cc-fb2d44db0921" [ 1704.408966] env[63241]: _type = "Task" [ 1704.408966] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.418330] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5255c475-24b0-e200-a2cc-fb2d44db0921, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.423349] env[63241]: DEBUG oslo_vmware.api [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Task: {'id': task-1820723, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292115} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.423617] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1704.423832] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1704.424048] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1704.424270] env[63241]: INFO nova.compute.manager [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Took 6.39 seconds to destroy the instance on the hypervisor. [ 1704.424563] env[63241]: DEBUG oslo.service.loopingcall [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.424762] env[63241]: DEBUG nova.compute.manager [-] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1704.424884] env[63241]: DEBUG nova.network.neutron [-] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1704.485077] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "1dc98fbd-a52b-42fa-8d37-d14318dbc941" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.485409] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "1dc98fbd-a52b-42fa-8d37-d14318dbc941" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.485803] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "1dc98fbd-a52b-42fa-8d37-d14318dbc941-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.486215] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "1dc98fbd-a52b-42fa-8d37-d14318dbc941-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.486521] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "1dc98fbd-a52b-42fa-8d37-d14318dbc941-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.489425] env[63241]: INFO nova.compute.manager [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Terminating instance [ 1704.491629] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "refresh_cache-1dc98fbd-a52b-42fa-8d37-d14318dbc941" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.491898] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquired lock "refresh_cache-1dc98fbd-a52b-42fa-8d37-d14318dbc941" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.492205] env[63241]: DEBUG nova.network.neutron [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1704.917119] env[63241]: DEBUG nova.network.neutron [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Successfully updated port: 43c2edfc-733f-41ab-8cd3-c132dd83c038 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1704.920729] env[63241]: DEBUG nova.compute.manager [req-1fcce241-275f-4de6-a22b-708d314adf73 req-09578da3-6530-4cf9-b893-aea094bfe95b service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Received event network-vif-plugged-43c2edfc-733f-41ab-8cd3-c132dd83c038 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1704.920941] env[63241]: DEBUG oslo_concurrency.lockutils [req-1fcce241-275f-4de6-a22b-708d314adf73 req-09578da3-6530-4cf9-b893-aea094bfe95b service nova] Acquiring lock "381bba62-49a7-4d6f-b12a-741f5d884fe5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.921177] env[63241]: DEBUG 
oslo_concurrency.lockutils [req-1fcce241-275f-4de6-a22b-708d314adf73 req-09578da3-6530-4cf9-b893-aea094bfe95b service nova] Lock "381bba62-49a7-4d6f-b12a-741f5d884fe5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.921347] env[63241]: DEBUG oslo_concurrency.lockutils [req-1fcce241-275f-4de6-a22b-708d314adf73 req-09578da3-6530-4cf9-b893-aea094bfe95b service nova] Lock "381bba62-49a7-4d6f-b12a-741f5d884fe5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.921510] env[63241]: DEBUG nova.compute.manager [req-1fcce241-275f-4de6-a22b-708d314adf73 req-09578da3-6530-4cf9-b893-aea094bfe95b service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] No waiting events found dispatching network-vif-plugged-43c2edfc-733f-41ab-8cd3-c132dd83c038 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1704.921674] env[63241]: WARNING nova.compute.manager [req-1fcce241-275f-4de6-a22b-708d314adf73 req-09578da3-6530-4cf9-b893-aea094bfe95b service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Received unexpected event network-vif-plugged-43c2edfc-733f-41ab-8cd3-c132dd83c038 for instance with vm_state building and task_state spawning. [ 1704.934513] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5255c475-24b0-e200-a2cc-fb2d44db0921, 'name': SearchDatastore_Task, 'duration_secs': 0.015999} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.934894] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.935157] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1704.935407] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.935861] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.936256] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1704.937015] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d97b9b2f-7879-444f-89c8-80f7a0625bf9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.949467] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1704.949646] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1704.950468] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0148d6e-f45f-44f2-b4f2-24b1f3bd3725 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.957399] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1704.957399] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523d9bda-7969-3ed2-78b1-30ad3deea809" [ 1704.957399] env[63241]: _type = "Task" [ 1704.957399] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.969156] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523d9bda-7969-3ed2-78b1-30ad3deea809, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.971868] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b9827e-de29-454a-83f0-00cafd07ed11 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.978742] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cc11fd-b9c2-472b-ac9c-b9b4f1904b9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.012916] env[63241]: DEBUG nova.objects.instance [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lazy-loading 'flavor' on Instance uuid f372d405-f7d5-4e5f-8c36-fe9651af2a0d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1705.016528] env[63241]: DEBUG nova.network.neutron [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Port 56f1b482-fc2c-45e5-9aca-99ff209a166e binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1705.018533] env[63241]: DEBUG nova.compute.manager [req-9f5b03f1-b507-4052-ba45-347b39ef4b73 req-0369e386-b2bd-4be0-aa8e-6a0b5b9af498 service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Received event network-vif-deleted-315b118d-b5f6-4f70-9ea2-76028cc6344d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1705.018722] env[63241]: INFO nova.compute.manager [req-9f5b03f1-b507-4052-ba45-347b39ef4b73 req-0369e386-b2bd-4be0-aa8e-6a0b5b9af498 service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Neutron deleted interface 315b118d-b5f6-4f70-9ea2-76028cc6344d; detaching it from the instance and deleting it from the info cache [ 1705.019935] env[63241]: DEBUG nova.network.neutron [req-9f5b03f1-b507-4052-ba45-347b39ef4b73 req-0369e386-b2bd-4be0-aa8e-6a0b5b9af498 service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] 
Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.020291] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8b463a-32db-4e68-92b8-d3697d2861e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.032646] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536519d2-cf5e-4f57-913c-34044c84c73f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.047897] env[63241]: DEBUG nova.compute.provider_tree [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1705.051989] env[63241]: DEBUG nova.network.neutron [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1705.105982] env[63241]: DEBUG nova.network.neutron [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.289925] env[63241]: DEBUG nova.network.neutron [-] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.406445] env[63241]: DEBUG nova.network.neutron [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Successfully updated port: aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1705.423617] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "refresh_cache-381bba62-49a7-4d6f-b12a-741f5d884fe5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.424361] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "refresh_cache-381bba62-49a7-4d6f-b12a-741f5d884fe5" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.425122] env[63241]: DEBUG nova.network.neutron [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1705.471377] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523d9bda-7969-3ed2-78b1-30ad3deea809, 'name': SearchDatastore_Task, 'duration_secs': 0.011863} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.473162] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d6b4a32-9129-4a34-b166-2cb46e211c76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.480255] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1705.480255] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52860170-97ab-a442-a4ed-b51e3aff8748" [ 1705.480255] env[63241]: _type = "Task" [ 1705.480255] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.488845] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52860170-97ab-a442-a4ed-b51e3aff8748, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.518630] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "refresh_cache-f372d405-f7d5-4e5f-8c36-fe9651af2a0d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.518830] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquired lock "refresh_cache-f372d405-f7d5-4e5f-8c36-fe9651af2a0d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.519020] env[63241]: DEBUG nova.network.neutron [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1705.519242] env[63241]: DEBUG nova.objects.instance [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lazy-loading 'info_cache' on Instance uuid f372d405-f7d5-4e5f-8c36-fe9651af2a0d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1705.528263] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9c36176-c174-43ed-8eff-3fd95927053e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.537675] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867430fa-b703-444b-8900-17b2b47c73c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.293019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Releasing lock "refresh_cache-1dc98fbd-a52b-42fa-8d37-d14318dbc941" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.293019] env[63241]: DEBUG nova.compute.manager [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1706.293019] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1706.293019] env[63241]: INFO nova.compute.manager [-] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Took 1.87 seconds to deallocate network for instance. 
[ 1706.293019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1706.293019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquired lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1706.293019] env[63241]: DEBUG nova.network.neutron [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1706.294704] env[63241]: DEBUG nova.objects.base [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1706.305443] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf99198-67d6-4eb8-b180-ecc9995073ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.325746] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "7f1710d0-857d-41fc-8151-8c5e129dda08-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.325746] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.325746] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.344887] env[63241]: DEBUG nova.compute.manager [req-9f5b03f1-b507-4052-ba45-347b39ef4b73 req-0369e386-b2bd-4be0-aa8e-6a0b5b9af498 service nova] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Detach interface failed, port_id=315b118d-b5f6-4f70-9ea2-76028cc6344d, reason: Instance c8f1ce16-70b7-41fd-8516-63198139c1cc could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1706.345369] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1706.345551] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52860170-97ab-a442-a4ed-b51e3aff8748, 'name': SearchDatastore_Task, 'duration_secs': 0.009524} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.346850] env[63241]: DEBUG nova.scheduler.client.report [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 121 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1706.347066] env[63241]: DEBUG nova.compute.provider_tree [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 121 to 122 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1706.347245] env[63241]: DEBUG nova.compute.provider_tree [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1706.350474] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-628e30ad-077f-488f-b257-64c812cc348a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.354024] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.354024] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490/44ddb1f0-fd5c-4c9e-baf2-eec09d80f490.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1706.354024] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3862b131-8a23-4ec2-b4b9-687761e1f092 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.363477] env[63241]: DEBUG oslo_vmware.api [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1706.363477] env[63241]: value = "task-1820724" [ 1706.363477] env[63241]: _type = "Task" [ 1706.363477] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.363734] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1706.363734] env[63241]: value = "task-1820725" [ 1706.363734] env[63241]: _type = "Task" [ 1706.363734] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.377162] env[63241]: DEBUG oslo_vmware.api [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820724, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.380635] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.393821] env[63241]: DEBUG nova.network.neutron [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1706.544619] env[63241]: INFO nova.compute.manager [None req-5983175f-9a23-4ca7-8291-98f5dce95585 tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Get console output [ 1706.544910] env[63241]: WARNING nova.virt.vmwareapi.driver [None req-5983175f-9a23-4ca7-8291-98f5dce95585 tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] The console log is missing. 
Check your VSPC configuration [ 1706.623041] env[63241]: DEBUG nova.network.neutron [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Updating instance_info_cache with network_info: [{"id": "43c2edfc-733f-41ab-8cd3-c132dd83c038", "address": "fa:16:3e:d6:a3:8c", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43c2edfc-73", "ovs_interfaceid": "43c2edfc-733f-41ab-8cd3-c132dd83c038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.823044] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.844166] env[63241]: DEBUG nova.network.neutron [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1706.855655] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.328s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.858829] env[63241]: DEBUG nova.compute.manager [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1706.859537] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.404s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.861093] env[63241]: INFO nova.compute.claims [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1706.885654] env[63241]: DEBUG oslo_vmware.api [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820724, 'name': PowerOffVM_Task, 'duration_secs': 0.332257} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.886365] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1706.886569] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1706.886838] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820725, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.887074] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d978e635-5c9b-4ddf-971f-8cd17dabba87 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.915043] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1706.915043] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1706.915043] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Deleting the datastore file [datastore1] 1dc98fbd-a52b-42fa-8d37-d14318dbc941 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1706.915043] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e0d08cc-66eb-4412-b45d-04c6868ef772 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.926740] env[63241]: DEBUG oslo_vmware.api [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for the task: (returnval){ [ 1706.926740] env[63241]: value = "task-1820727" [ 1706.926740] env[63241]: _type = "Task" [ 1706.926740] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.941104] env[63241]: DEBUG oslo_vmware.api [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820727, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.996629] env[63241]: DEBUG nova.compute.manager [req-6fde5e79-62d1-4c22-b101-95470edf481f req-6dca618e-945f-4e7e-bf4f-004ea818afa1 service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Received event network-changed-43c2edfc-733f-41ab-8cd3-c132dd83c038 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1706.996863] env[63241]: DEBUG nova.compute.manager [req-6fde5e79-62d1-4c22-b101-95470edf481f req-6dca618e-945f-4e7e-bf4f-004ea818afa1 service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Refreshing instance network info cache due to event network-changed-43c2edfc-733f-41ab-8cd3-c132dd83c038. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1706.997096] env[63241]: DEBUG oslo_concurrency.lockutils [req-6fde5e79-62d1-4c22-b101-95470edf481f req-6dca618e-945f-4e7e-bf4f-004ea818afa1 service nova] Acquiring lock "refresh_cache-381bba62-49a7-4d6f-b12a-741f5d884fe5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.003433] env[63241]: DEBUG nova.network.neutron [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updating instance_info_cache with network_info: [{"id": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "address": "fa:16:3e:d2:d7:ea", "network": {"id": "d35148d1-0dba-4e39-9bc1-ad561f93e9f8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1579993511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e7a8a40a56ee42dca4190ac78e5f22ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac5cae6-11", "ovs_interfaceid": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1707.077946] env[63241]: DEBUG nova.compute.manager [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Received event network-vif-plugged-aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1707.078194] env[63241]: DEBUG oslo_concurrency.lockutils [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] Acquiring lock "f65e5b00-38b5-4453-b370-1f56f18053eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.078410] env[63241]: DEBUG oslo_concurrency.lockutils [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] Lock "f65e5b00-38b5-4453-b370-1f56f18053eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.078572] env[63241]: DEBUG oslo_concurrency.lockutils [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] Lock "f65e5b00-38b5-4453-b370-1f56f18053eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.078738] env[63241]: DEBUG nova.compute.manager [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] No waiting events found dispatching network-vif-plugged-aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1707.078901] env[63241]: WARNING nova.compute.manager [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Received unexpected event network-vif-plugged-aac5cae6-1124-4f0a-9270-ff1f4982fff4 for instance with vm_state building and task_state spawning. [ 1707.079281] env[63241]: DEBUG nova.compute.manager [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Received event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1707.079516] env[63241]: DEBUG nova.compute.manager [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing instance network info cache due to event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1707.079638] env[63241]: DEBUG oslo_concurrency.lockutils [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] Acquiring lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.125775] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "refresh_cache-381bba62-49a7-4d6f-b12a-741f5d884fe5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.126182] env[63241]: DEBUG nova.compute.manager [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Instance network_info: |[{"id": "43c2edfc-733f-41ab-8cd3-c132dd83c038", "address": "fa:16:3e:d6:a3:8c", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43c2edfc-73", "ovs_interfaceid": "43c2edfc-733f-41ab-8cd3-c132dd83c038", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1707.126497] env[63241]: DEBUG oslo_concurrency.lockutils [req-6fde5e79-62d1-4c22-b101-95470edf481f req-6dca618e-945f-4e7e-bf4f-004ea818afa1 service nova] Acquired lock "refresh_cache-381bba62-49a7-4d6f-b12a-741f5d884fe5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.126675] env[63241]: DEBUG nova.network.neutron [req-6fde5e79-62d1-4c22-b101-95470edf481f req-6dca618e-945f-4e7e-bf4f-004ea818afa1 service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Refreshing network info cache for port 43c2edfc-733f-41ab-8cd3-c132dd83c038 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1707.132016] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:a3:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43c2edfc-733f-41ab-8cd3-c132dd83c038', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1707.138034] env[63241]: DEBUG oslo.service.loopingcall [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1707.139080] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1707.140410] env[63241]: DEBUG nova.network.neutron [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Updating instance_info_cache with network_info: [{"id": "68ec05cb-7eaf-4904-b491-0f5d3bb27936", "address": "fa:16:3e:3b:43:74", "network": {"id": "909278be-6b42-43b3-ae32-decd720bc5f5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-138389207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2338858c7214e8286c5948da80ffc1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68ec05cb-7e", "ovs_interfaceid": "68ec05cb-7eaf-4904-b491-0f5d3bb27936", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1707.142112] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e994b821-6d91-4b16-8501-83069b973905 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.161208] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Releasing lock "refresh_cache-f372d405-f7d5-4e5f-8c36-fe9651af2a0d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.167435] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1707.167435] env[63241]: value = "task-1820728" [ 1707.167435] env[63241]: _type = "Task" [ 1707.167435] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.176786] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820728, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.370885] env[63241]: DEBUG nova.compute.utils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1707.372781] env[63241]: DEBUG nova.compute.manager [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1707.373261] env[63241]: DEBUG nova.network.neutron [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1707.388181] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563476} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.389419] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490/44ddb1f0-fd5c-4c9e-baf2-eec09d80f490.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1707.389721] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1707.390093] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8c00d3a-b9a0-4622-b15d-ce5e74631df9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.393272] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.393615] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.393834] env[63241]: DEBUG nova.network.neutron [None 
req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1707.400979] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1707.400979] env[63241]: value = "task-1820729" [ 1707.400979] env[63241]: _type = "Task" [ 1707.400979] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.412382] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820729, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.423033] env[63241]: DEBUG nova.policy [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16508acd49bf4efba4f9c509a2dc5fd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d1a62ae45c74a7ba071363005b3a52e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1707.437780] env[63241]: DEBUG oslo_vmware.api [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Task: {'id': task-1820727, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.366375} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.438057] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1707.438247] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1707.438432] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1707.438597] env[63241]: INFO nova.compute.manager [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1707.438834] env[63241]: DEBUG oslo.service.loopingcall [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1707.439037] env[63241]: DEBUG nova.compute.manager [-] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1707.439136] env[63241]: DEBUG nova.network.neutron [-] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1707.455270] env[63241]: DEBUG nova.network.neutron [-] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1707.506940] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Releasing lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.509913] env[63241]: DEBUG nova.compute.manager [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Instance network_info: |[{"id": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "address": "fa:16:3e:d2:d7:ea", "network": {"id": "d35148d1-0dba-4e39-9bc1-ad561f93e9f8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1579993511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e7a8a40a56ee42dca4190ac78e5f22ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac5cae6-11", "ovs_interfaceid": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1707.509913] env[63241]: DEBUG oslo_concurrency.lockutils [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] Acquired lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.509913] env[63241]: DEBUG nova.network.neutron [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1707.511674] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:d7:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c2b89fb-df8c-47c0-83ae-44291236feb4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aac5cae6-1124-4f0a-9270-ff1f4982fff4', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1707.522786] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 
tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Creating folder: Project (e7a8a40a56ee42dca4190ac78e5f22ef). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1707.524579] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8df55f9c-3820-41a6-880f-f7a91206c8e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.538368] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Created folder: Project (e7a8a40a56ee42dca4190ac78e5f22ef) in parent group-v376927. [ 1707.538368] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Creating folder: Instances. Parent ref: group-v377147. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1707.538368] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5de5b9d-25d8-4d8c-9e3b-be644568d462 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.548342] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Created folder: Instances in parent group-v377147. [ 1707.548902] env[63241]: DEBUG oslo.service.loopingcall [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1707.549273] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1707.549648] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47ae9320-9a8d-436a-9e9f-ecd7d108dc83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.570559] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1707.570559] env[63241]: value = "task-1820732" [ 1707.570559] env[63241]: _type = "Task" [ 1707.570559] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.579993] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820732, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.669020] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1707.669020] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3626351-cd87-4d7b-8335-0e8fbbedde09 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.686021] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820728, 'name': CreateVM_Task, 'duration_secs': 0.488703} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.686021] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1707.686021] env[63241]: DEBUG oslo_vmware.api [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1707.686021] env[63241]: value = "task-1820733" [ 1707.686021] env[63241]: _type = "Task" [ 1707.686021] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.686540] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.686844] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.687325] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1707.687752] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a840e65e-044a-4692-9230-b63371990335 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.693043] env[63241]: DEBUG nova.network.neutron [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Successfully created port: 3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1707.698436] 
env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1707.698436] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52227dd5-772b-d11a-8728-b3da9370a020" [ 1707.698436] env[63241]: _type = "Task" [ 1707.698436] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.702779] env[63241]: DEBUG oslo_vmware.api [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820733, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.713545] env[63241]: DEBUG oslo_concurrency.lockutils [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Acquiring lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.713545] env[63241]: DEBUG oslo_concurrency.lockutils [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.713545] env[63241]: DEBUG oslo_concurrency.lockutils [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Acquiring lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.713545] env[63241]: DEBUG oslo_concurrency.lockutils [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.713545] env[63241]: DEBUG oslo_concurrency.lockutils [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.714492] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52227dd5-772b-d11a-8728-b3da9370a020, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.715137] env[63241]: INFO nova.compute.manager [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Terminating instance [ 1707.717516] env[63241]: DEBUG nova.compute.manager [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1707.717952] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1707.718362] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e01973f-98f9-437e-ab83-030953dcab52 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.728244] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for the task: (returnval){ [ 1707.728244] env[63241]: value = "task-1820734" [ 1707.728244] env[63241]: _type = "Task" [ 1707.728244] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.737157] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820734, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.879711] env[63241]: DEBUG nova.compute.manager [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1707.920064] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820729, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07202} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.920582] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1707.922302] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb03e8e-8f15-40bb-98ec-2e5920a5a103 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.950468] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490/44ddb1f0-fd5c-4c9e-baf2-eec09d80f490.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1707.956809] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d06af4e-2c35-4cde-b0f1-4491ef6550fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.979425] env[63241]: DEBUG nova.network.neutron [-] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1707.986735] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1707.986735] env[63241]: value = "task-1820735" [ 1707.986735] env[63241]: _type = "Task" [ 1707.986735] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.999994] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820735, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.082691] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820732, 'name': CreateVM_Task, 'duration_secs': 0.458511} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.082865] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1708.083570] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.208274] env[63241]: DEBUG oslo_vmware.api [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820733, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.225235] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52227dd5-772b-d11a-8728-b3da9370a020, 'name': SearchDatastore_Task, 'duration_secs': 0.018334} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.225620] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.226634] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1708.226634] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.226634] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.226634] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1708.226797] 
env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.227035] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1708.228670] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e770fcf-ada8-479b-ba22-6012fe03d713 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.230579] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58519798-99b8-49eb-a034-de4f8b5b3f4c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.242889] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1708.242889] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e70e72-f326-b266-552b-0aa3ec618a5a" [ 1708.242889] env[63241]: _type = "Task" [ 1708.242889] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.246016] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820734, 'name': PowerOffVM_Task, 'duration_secs': 0.270737} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.252206] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1708.252206] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1708.252206] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377005', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'name': 'volume-f8c6db70-e484-49c7-8211-edd49f1c6d75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9361ee6a-7c4d-4409-bc3c-7da7d4550d97', 'attached_at': '', 'detached_at': '', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'serial': 'f8c6db70-e484-49c7-8211-edd49f1c6d75'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1708.252206] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91bf3df-7a14-4f66-a9dc-43e731f0837c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.254260] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1708.254619] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1708.260270] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-580966cb-4b0b-4558-9dad-6bd54836b11c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.260695] env[63241]: DEBUG nova.network.neutron [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updated VIF entry in instance network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1708.261047] env[63241]: DEBUG nova.network.neutron [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updating instance_info_cache with network_info: [{"id": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "address": "fa:16:3e:d2:d7:ea", "network": {"id": "d35148d1-0dba-4e39-9bc1-ad561f93e9f8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1579993511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e7a8a40a56ee42dca4190ac78e5f22ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac5cae6-11", "ovs_interfaceid": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.265726] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e70e72-f326-b266-552b-0aa3ec618a5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.285022] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1708.285022] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52988dcc-bc15-0a0b-3b90-36e3d64f6cf3" [ 1708.285022] env[63241]: _type = "Task" [ 1708.285022] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.289906] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f0551c-8efe-4b50-b3c0-8b8be3e51d26 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.293483] env[63241]: DEBUG nova.network.neutron [req-6fde5e79-62d1-4c22-b101-95470edf481f req-6dca618e-945f-4e7e-bf4f-004ea818afa1 service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Updated VIF entry in instance network info cache for port 43c2edfc-733f-41ab-8cd3-c132dd83c038. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1708.293808] env[63241]: DEBUG nova.network.neutron [req-6fde5e79-62d1-4c22-b101-95470edf481f req-6dca618e-945f-4e7e-bf4f-004ea818afa1 service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Updating instance_info_cache with network_info: [{"id": "43c2edfc-733f-41ab-8cd3-c132dd83c038", "address": "fa:16:3e:d6:a3:8c", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43c2edfc-73", "ovs_interfaceid": "43c2edfc-733f-41ab-8cd3-c132dd83c038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.305111] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52988dcc-bc15-0a0b-3b90-36e3d64f6cf3, 'name': SearchDatastore_Task, 'duration_secs': 0.014672} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.309426] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec0907be-ff3f-4a1d-8940-927f37d1504f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.311351] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85816464-260f-4637-903c-cd6ad1ef2473 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.320757] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1708.320757] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526579ed-cac7-9ff4-dfb8-0046bc7b7b50" [ 1708.320757] env[63241]: _type = "Task" [ 1708.320757] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.340672] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6def9299-f7f6-4bec-8a84-010b0caf0742 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.351357] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526579ed-cac7-9ff4-dfb8-0046bc7b7b50, 'name': SearchDatastore_Task, 'duration_secs': 0.017378} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.362038] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.362239] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 381bba62-49a7-4d6f-b12a-741f5d884fe5/381bba62-49a7-4d6f-b12a-741f5d884fe5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1708.362865] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] The volume has not been displaced from its original location: [datastore1] volume-f8c6db70-e484-49c7-8211-edd49f1c6d75/volume-f8c6db70-e484-49c7-8211-edd49f1c6d75.vmdk. No consolidation needed. 
{{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1708.368146] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Reconfiguring VM instance instance-0000002c to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1708.370974] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ead6e1a-fe63-4d4d-a83d-04122a8b598a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.373071] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-663b09ed-30bd-4c98-a0f9-0eb8643fb7ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.395069] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for the task: (returnval){ [ 1708.395069] env[63241]: value = "task-1820737" [ 1708.395069] env[63241]: _type = "Task" [ 1708.395069] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.395636] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1708.395636] env[63241]: value = "task-1820736" [ 1708.395636] env[63241]: _type = "Task" [ 1708.395636] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.409705] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820737, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.412678] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820736, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.466039] env[63241]: DEBUG nova.network.neutron [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance_info_cache with network_info: [{"id": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "address": "fa:16:3e:72:df:ac", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f1b482-fc", "ovs_interfaceid": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.482505] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f850c6-b86c-48fc-8827-dadae3796981 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.486630] env[63241]: INFO nova.compute.manager [-] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Took 1.05 seconds to deallocate network for instance. [ 1708.497995] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275b9ca4-97f9-436c-98f0-ca4496c097a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.504940] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820735, 'name': ReconfigVM_Task, 'duration_secs': 0.497113} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.505693] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490/44ddb1f0-fd5c-4c9e-baf2-eec09d80f490.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1708.506351] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c91427f-6fdc-47d0-9b26-cae34f640fc3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.534635] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3506a2e-b67d-4fc1-94a7-e07f04eb5899 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.539234] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1708.539234] env[63241]: value = "task-1820738" [ 1708.539234] env[63241]: _type = "Task" [ 1708.539234] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.545586] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7dc12f-ef7b-489a-b367-95185fb0957a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.556207] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820738, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.564153] env[63241]: DEBUG nova.compute.provider_tree [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1708.699058] env[63241]: DEBUG oslo_vmware.api [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820733, 'name': PowerOnVM_Task, 'duration_secs': 0.755432} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.699355] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1708.699543] env[63241]: DEBUG nova.compute.manager [None req-9f2e63f1-1270-4ca1-90e1-3f645b3f1f32 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1708.700366] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918443e5-3610-42a9-a6d4-8b9a982d103d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.756971] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e70e72-f326-b266-552b-0aa3ec618a5a, 'name': SearchDatastore_Task, 'duration_secs': 0.023691} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.757345] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.757569] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1708.757801] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.757947] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.758140] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1708.758396] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5139fad-bfb0-4d60-ad79-9d3506258cf9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.767370] env[63241]: DEBUG oslo_concurrency.lockutils [req-8ae46b5d-2f81-46a9-8ff5-782fe2d3acd8 req-256178c5-461d-4138-b798-00103bf69cd4 service nova] Releasing lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.768351] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1708.768523] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1708.769243] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bede4752-a2f0-4f6b-9b3b-88c471be3577 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.774296] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1708.774296] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520e789d-efdc-21a5-ebb4-d9075c3ba70d" [ 1708.774296] env[63241]: _type = "Task" [ 1708.774296] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.781665] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520e789d-efdc-21a5-ebb4-d9075c3ba70d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.799466] env[63241]: DEBUG oslo_concurrency.lockutils [req-6fde5e79-62d1-4c22-b101-95470edf481f req-6dca618e-945f-4e7e-bf4f-004ea818afa1 service nova] Releasing lock "refresh_cache-381bba62-49a7-4d6f-b12a-741f5d884fe5" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.898124] env[63241]: DEBUG nova.compute.manager [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1708.911402] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820736, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.914387] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820737, 'name': ReconfigVM_Task, 'duration_secs': 0.282907} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.914633] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Reconfigured VM instance instance-0000002c to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1708.923032] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd75f54f-ca1c-4103-9b43-fdf5a6cc7a5b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.937810] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for the task: (returnval){ [ 1708.937810] env[63241]: value = "task-1820739" [ 1708.937810] env[63241]: _type = "Task" [ 1708.937810] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.940689] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1708.940931] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1708.941108] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1708.941302] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1708.941451] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1708.941600] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1708.941816] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1708.941979] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1708.942169] 
env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1708.942335] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1708.942509] env[63241]: DEBUG nova.virt.hardware [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1708.943487] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1365e9-c5d3-4153-80af-efa5b8b57790 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.960093] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cb1fa2-13c5-4fc3-aa08-2e4217d6d69d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.963754] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820739, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.974388] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.995945] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.052503] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820738, 'name': Rename_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.100774] env[63241]: DEBUG nova.scheduler.client.report [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1709.101250] env[63241]: DEBUG nova.compute.provider_tree [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 122 to 123 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1709.101413] env[63241]: DEBUG nova.compute.provider_tree [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1709.178677] env[63241]: DEBUG nova.compute.manager [req-22e2dc3a-a2d2-49c7-b0f2-c381c4c4dc16 req-b3e8578d-6a9b-4f91-b412-8e4a9e20f221 service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Received event network-vif-plugged-3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1709.179176] env[63241]: DEBUG oslo_concurrency.lockutils [req-22e2dc3a-a2d2-49c7-b0f2-c381c4c4dc16 req-b3e8578d-6a9b-4f91-b412-8e4a9e20f221 service nova] Acquiring lock "31998a62-70f5-4205-89b9-df8312916126-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.179525] env[63241]: DEBUG oslo_concurrency.lockutils [req-22e2dc3a-a2d2-49c7-b0f2-c381c4c4dc16 req-b3e8578d-6a9b-4f91-b412-8e4a9e20f221 service nova] Lock "31998a62-70f5-4205-89b9-df8312916126-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.179927] env[63241]: DEBUG oslo_concurrency.lockutils [req-22e2dc3a-a2d2-49c7-b0f2-c381c4c4dc16 req-b3e8578d-6a9b-4f91-b412-8e4a9e20f221 service nova] Lock "31998a62-70f5-4205-89b9-df8312916126-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.180417] env[63241]: DEBUG nova.compute.manager [req-22e2dc3a-a2d2-49c7-b0f2-c381c4c4dc16 req-b3e8578d-6a9b-4f91-b412-8e4a9e20f221 service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] No waiting events found dispatching network-vif-plugged-3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1709.180417] env[63241]: WARNING nova.compute.manager [req-22e2dc3a-a2d2-49c7-b0f2-c381c4c4dc16 req-b3e8578d-6a9b-4f91-b412-8e4a9e20f221 service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Received unexpected event network-vif-plugged-3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7 for instance with vm_state building and task_state spawning. [ 1709.287449] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520e789d-efdc-21a5-ebb4-d9075c3ba70d, 'name': SearchDatastore_Task, 'duration_secs': 0.018027} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.288308] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a50dfa41-8729-44e9-bb5e-9550383d60d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.294683] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1709.294683] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522810fe-a6d1-b635-6f6e-0509019516f1" [ 1709.294683] env[63241]: _type = "Task" [ 1709.294683] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.305743] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522810fe-a6d1-b635-6f6e-0509019516f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.411075] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.921655} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.411383] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 381bba62-49a7-4d6f-b12a-741f5d884fe5/381bba62-49a7-4d6f-b12a-741f5d884fe5.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1709.411595] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1709.411846] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6f68178-558f-487c-bcfa-7c2c1f2856f4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.418286] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1709.418286] env[63241]: value = "task-1820740" [ 1709.418286] env[63241]: _type = "Task" [ 1709.418286] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.429021] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820740, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.459921] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820739, 'name': ReconfigVM_Task, 'duration_secs': 0.168012} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.462051] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377005', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'name': 'volume-f8c6db70-e484-49c7-8211-edd49f1c6d75', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9361ee6a-7c4d-4409-bc3c-7da7d4550d97', 'attached_at': '', 'detached_at': '', 'volume_id': 'f8c6db70-e484-49c7-8211-edd49f1c6d75', 'serial': 'f8c6db70-e484-49c7-8211-edd49f1c6d75'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1709.462667] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1709.463573] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0054b4-3e81-4667-8690-8bc61059d9d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.474030] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1709.474264] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ce24d76-605f-462d-a446-0ba7bce3aa9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.476658] env[63241]: DEBUG nova.network.neutron [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Successfully updated port: 3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1709.507069] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d82838f-0fc2-413b-ba11-e644d246ce7a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.528083] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ba1ec0-9bc1-4bc1-bbe3-ef295b51aaa1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.535781] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance '7f1710d0-857d-41fc-8151-8c5e129dda08' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 
1709.552205] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820738, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.561483] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1709.561752] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1709.562326] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Deleting the datastore file [datastore1] 9361ee6a-7c4d-4409-bc3c-7da7d4550d97 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1709.562645] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a9842f3-554d-47b7-8690-ebaaf2b947c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.569167] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for the task: (returnval){ [ 1709.569167] env[63241]: value = "task-1820742" [ 1709.569167] env[63241]: _type = "Task" [ 1709.569167] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.582692] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820742, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.606191] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.606690] env[63241]: DEBUG nova.compute.manager [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1709.609311] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 20.856s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.806916] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522810fe-a6d1-b635-6f6e-0509019516f1, 'name': SearchDatastore_Task, 'duration_secs': 0.014648} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.807248] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.807518] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f65e5b00-38b5-4453-b370-1f56f18053eb/f65e5b00-38b5-4453-b370-1f56f18053eb.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1709.807776] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d8767f0-b188-4370-bd0f-b9cad85aac36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.814436] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1709.814436] env[63241]: value = "task-1820743" [ 1709.814436] env[63241]: _type = "Task" [ 1709.814436] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.823774] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820743, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.928039] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820740, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100176} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.928436] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1709.929144] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e46832-f895-4d9f-b9c4-7bb09a3d6f10 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.951224] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 381bba62-49a7-4d6f-b12a-741f5d884fe5/381bba62-49a7-4d6f-b12a-741f5d884fe5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1709.951997] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38c86e52-857c-4fb9-9d4d-bd19b0505d43 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.971851] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1709.971851] env[63241]: value = "task-1820744" [ 1709.971851] env[63241]: _type = "Task" [ 1709.971851] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.982405] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820744, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.989675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "refresh_cache-31998a62-70f5-4205-89b9-df8312916126" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.989675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "refresh_cache-31998a62-70f5-4205-89b9-df8312916126" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.989834] env[63241]: DEBUG nova.network.neutron [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1710.043589] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1710.044050] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60318aac-89ff-4401-aa68-c7d96c3855b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.054736] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820738, 'name': Rename_Task, 'duration_secs': 1.044544} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.056036] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1710.056602] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1710.056602] env[63241]: value = "task-1820745" [ 1710.056602] env[63241]: _type = "Task" [ 1710.056602] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.056602] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22253a79-7afc-4113-ab9d-62a789bc3a27 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.066468] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820745, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.067852] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1710.067852] env[63241]: value = "task-1820746" [ 1710.067852] env[63241]: _type = "Task" [ 1710.067852] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.079582] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820746, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.082999] env[63241]: DEBUG oslo_vmware.api [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Task: {'id': task-1820742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.387652} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.083284] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1710.083492] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1710.083642] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1710.083821] env[63241]: INFO nova.compute.manager [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Took 2.37 seconds to destroy the instance on the hypervisor. 
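The recurring "Waiting for the task: (returnval){ ... } to complete" and "progress is N%" entries above come from oslo.vmware's task-wait helper, which nova's vmwareapi driver invokes after every asynchronous vCenter call (Rename_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, and so on). A minimal sketch of that pattern is shown below; it uses only the public oslo_vmware.api.VMwareAPISession interface (invoke_api / wait_for_task), and the host, credentials, and vm_ref argument are illustrative placeholders, not values taken from this log.

# Sketch only: illustrates the invoke_api/wait_for_task polling pattern that
# produces the "Invoking ..." and "_poll_task ... progress is N%" entries above.
from oslo_vmware import api

# Placeholder connection details (not from this log).
session = api.VMwareAPISession(
    'vcenter.example.org',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

def power_on(vm_ref):
    # invoke_api issues the SOAP request; this is what appears in the log as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-...".
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task object until it reaches the 'success'
    # state (or raises on 'error'); each poll is logged by _poll_task with
    # the task id and its current progress percentage.
    session.wait_for_task(task)

Internally, wait_for_task drives the poll with a fixed-interval looping call that reads the task's info.state and info.progress, which is what the interleaved _poll_task entries for task-1820738 through task-1820750 in this section record.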
[ 1710.084080] env[63241]: DEBUG oslo.service.loopingcall [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1710.084290] env[63241]: DEBUG nova.compute.manager [-] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1710.084443] env[63241]: DEBUG nova.network.neutron [-] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1710.113259] env[63241]: DEBUG nova.compute.utils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1710.118130] env[63241]: DEBUG nova.compute.manager [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1710.118358] env[63241]: DEBUG nova.network.neutron [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1710.238996] env[63241]: DEBUG nova.policy [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb9af037642e4044826d210ea26affee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d257d51a2254f5386fd3348602e5b71', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1710.332051] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820743, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.485552] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820744, 'name': ReconfigVM_Task, 'duration_secs': 0.422806} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.485858] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 381bba62-49a7-4d6f-b12a-741f5d884fe5/381bba62-49a7-4d6f-b12a-741f5d884fe5.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1710.486624] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b90797eb-b280-4691-9135-534853db7278 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.500321] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1710.500321] env[63241]: value = "task-1820747" [ 1710.500321] env[63241]: _type = "Task" [ 1710.500321] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.516095] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820747, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.571229] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820745, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.580313] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820746, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.587713] env[63241]: DEBUG nova.network.neutron [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1710.612134] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4a3677-72af-4fc7-a903-1f8e9133929f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.618327] env[63241]: DEBUG nova.compute.manager [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1710.624539] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d3ff8b-3156-41eb-b502-814c7950e95d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.664718] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76be1aa8-c88d-4f4b-aadb-1485dba063d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.673852] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f0d6ad-f9ad-4fc2-bec8-b234c9f927f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.689877] env[63241]: DEBUG nova.compute.provider_tree [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1710.828795] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820743, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676358} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.829527] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f65e5b00-38b5-4453-b370-1f56f18053eb/f65e5b00-38b5-4453-b370-1f56f18053eb.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1710.829726] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1710.829993] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4f72575-13b5-48e7-9481-62aea9160e19 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.836704] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1710.836704] env[63241]: value = "task-1820748" [ 1710.836704] env[63241]: _type = "Task" [ 1710.836704] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.845232] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820748, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.969563] env[63241]: DEBUG nova.network.neutron [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Updating instance_info_cache with network_info: [{"id": "3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7", "address": "fa:16:3e:f4:2f:70", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7fb03d-c1", "ovs_interfaceid": "3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.010384] env[63241]: DEBUG nova.network.neutron [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Successfully created port: 7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1711.017889] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820747, 'name': Rename_Task, 'duration_secs': 0.198317} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.018210] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1711.018463] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b473230-8ca4-45d4-bc39-7749478dc5e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.025193] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1711.025193] env[63241]: value = "task-1820749" [ 1711.025193] env[63241]: _type = "Task" [ 1711.025193] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.033746] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820749, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.068817] env[63241]: DEBUG oslo_vmware.api [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820745, 'name': PowerOnVM_Task, 'duration_secs': 0.769742} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.069140] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1711.069344] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6a9640-4b95-44c1-888e-80349e73ae67 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance '7f1710d0-857d-41fc-8151-8c5e129dda08' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1711.083348] env[63241]: DEBUG oslo_vmware.api [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820746, 'name': PowerOnVM_Task, 'duration_secs': 0.671254} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.083348] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1711.083348] env[63241]: INFO nova.compute.manager [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Took 16.32 seconds to spawn the instance on the hypervisor. [ 1711.083348] env[63241]: DEBUG nova.compute.manager [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1711.083595] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7846167-4c1c-48d3-bd88-163c6fe74a90 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.168348] env[63241]: DEBUG nova.network.neutron [-] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.195957] env[63241]: DEBUG nova.scheduler.client.report [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1711.349425] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820748, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068586} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.349742] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1711.350568] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6914fc61-056c-462f-80cb-80f1b7953ed9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.374212] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] f65e5b00-38b5-4453-b370-1f56f18053eb/f65e5b00-38b5-4453-b370-1f56f18053eb.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1711.374514] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-600a502e-51b1-4aad-afb3-24fede4da72e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.396064] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1711.396064] env[63241]: value = "task-1820750" [ 1711.396064] env[63241]: _type = "Task" [ 1711.396064] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.406423] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820750, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.447253] env[63241]: DEBUG nova.compute.manager [req-293ac4a9-cf75-432a-b17b-ce3166aca998 req-897ca343-5364-43c0-bbb2-2551cbc2ea59 service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Received event network-changed-3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1711.447469] env[63241]: DEBUG nova.compute.manager [req-293ac4a9-cf75-432a-b17b-ce3166aca998 req-897ca343-5364-43c0-bbb2-2551cbc2ea59 service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Refreshing instance network info cache due to event network-changed-3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1711.447674] env[63241]: DEBUG oslo_concurrency.lockutils [req-293ac4a9-cf75-432a-b17b-ce3166aca998 req-897ca343-5364-43c0-bbb2-2551cbc2ea59 service nova] Acquiring lock "refresh_cache-31998a62-70f5-4205-89b9-df8312916126" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1711.472315] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "refresh_cache-31998a62-70f5-4205-89b9-df8312916126" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.472677] env[63241]: DEBUG nova.compute.manager [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Instance network_info: |[{"id": "3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7", "address": "fa:16:3e:f4:2f:70", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7fb03d-c1", "ovs_interfaceid": "3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1711.473020] env[63241]: DEBUG oslo_concurrency.lockutils [req-293ac4a9-cf75-432a-b17b-ce3166aca998 req-897ca343-5364-43c0-bbb2-2551cbc2ea59 service nova] Acquired lock "refresh_cache-31998a62-70f5-4205-89b9-df8312916126" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1711.473235] env[63241]: DEBUG nova.network.neutron [req-293ac4a9-cf75-432a-b17b-ce3166aca998 req-897ca343-5364-43c0-bbb2-2551cbc2ea59 service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Refreshing network info cache for port 3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1711.474560] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:2f:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfae3ef8-cae7-455d-8632-ba93e1671625', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1711.483019] env[63241]: DEBUG oslo.service.loopingcall [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1711.484386] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31998a62-70f5-4205-89b9-df8312916126] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1711.484641] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed4bf7ea-25e9-4211-abb4-93f65e4708ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.505676] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1711.505676] env[63241]: value = "task-1820751" [ 1711.505676] env[63241]: _type = "Task" [ 1711.505676] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.519630] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820751, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.541914] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820749, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.600907] env[63241]: INFO nova.compute.manager [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Took 39.20 seconds to build instance. [ 1711.632013] env[63241]: DEBUG nova.compute.manager [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1711.661969] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1711.663033] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1711.663033] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1711.663033] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1711.663033] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1711.663033] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1711.663033] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1711.663353] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
[ 1711.663353] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1711.663431] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1711.663590] env[63241]: DEBUG nova.virt.hardware [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1711.664550] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f06bec-9177-4c59-a933-76f54556b331 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.672347] env[63241]: INFO nova.compute.manager [-] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Took 1.59 seconds to deallocate network for instance. [ 1711.678137] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db3db62-25bf-4a13-b5b4-c773b7ac507b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.907850] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820750, 'name': ReconfigVM_Task, 'duration_secs': 0.285832} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.908266] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Reconfigured VM instance instance-00000054 to attach disk [datastore1] f65e5b00-38b5-4453-b370-1f56f18053eb/f65e5b00-38b5-4453-b370-1f56f18053eb.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1711.909035] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b5303e1-71c3-4442-b126-2ebbf006894d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.915872] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1711.915872] env[63241]: value = "task-1820752" [ 1711.915872] env[63241]: _type = "Task" [ 1711.915872] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.926777] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820752, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.016329] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820751, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.038951] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820749, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.102971] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6de1d37c-1166-4439-97da-c7512779fb1a tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.706s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.208504] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.599s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.212894] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 19.411s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.242955] env[63241]: INFO nova.compute.manager [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Took 0.57 seconds to detach 1 volumes for instance. [ 1712.243229] env[63241]: DEBUG nova.compute.manager [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Deleting volume: f8c6db70-e484-49c7-8211-edd49f1c6d75 {{(pid=63241) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1712.334206] env[63241]: DEBUG nova.network.neutron [req-293ac4a9-cf75-432a-b17b-ce3166aca998 req-897ca343-5364-43c0-bbb2-2551cbc2ea59 service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Updated VIF entry in instance network info cache for port 3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1712.334742] env[63241]: DEBUG nova.network.neutron [req-293ac4a9-cf75-432a-b17b-ce3166aca998 req-897ca343-5364-43c0-bbb2-2551cbc2ea59 service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Updating instance_info_cache with network_info: [{"id": "3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7", "address": "fa:16:3e:f4:2f:70", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a7fb03d-c1", "ovs_interfaceid": "3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.423678] env[63241]: DEBUG oslo_concurrency.lockutils [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.423906] env[63241]: DEBUG oslo_concurrency.lockutils [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.424350] env[63241]: DEBUG nova.compute.manager [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1712.424914] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c09fee4-4db5-49e0-8e9f-5919898a89de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.431688] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820752, 'name': Rename_Task, 'duration_secs': 0.152396} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.434730] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1712.435309] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5da3c460-7b48-4fac-a162-58b4ec0c3015 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.439257] env[63241]: DEBUG nova.compute.manager [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1712.440030] env[63241]: DEBUG nova.objects.instance [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'flavor' on Instance uuid 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1712.446677] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1712.446677] env[63241]: value = "task-1820754" [ 1712.446677] env[63241]: _type = "Task" [ 1712.446677] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.455880] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820754, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.516077] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820751, 'name': CreateVM_Task, 'duration_secs': 0.616772} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.516346] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31998a62-70f5-4205-89b9-df8312916126] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1712.516983] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.517220] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.517502] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1712.517765] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59d76e12-ba32-496e-9cab-f8c8c771d7bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.522874] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1712.522874] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5278b5e3-e5a5-7969-e214-aca41adfcc0c" [ 1712.522874] env[63241]: _type = "Task" [ 1712.522874] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.533839] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5278b5e3-e5a5-7969-e214-aca41adfcc0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.540306] env[63241]: DEBUG oslo_vmware.api [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820749, 'name': PowerOnVM_Task, 'duration_secs': 1.054161} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.540306] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1712.540306] env[63241]: INFO nova.compute.manager [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Took 12.59 seconds to spawn the instance on the hypervisor. [ 1712.540306] env[63241]: DEBUG nova.compute.manager [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1712.541047] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bdc1e9-dce7-469e-a117-7fdd12150918 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.791812] env[63241]: INFO nova.scheduler.client.report [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted allocation for migration 5eaba4ce-595b-4b48-b05e-b26491a5c40b [ 1712.798744] env[63241]: DEBUG oslo_concurrency.lockutils [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.837998] env[63241]: DEBUG oslo_concurrency.lockutils [req-293ac4a9-cf75-432a-b17b-ce3166aca998 req-897ca343-5364-43c0-bbb2-2551cbc2ea59 service nova] Releasing lock "refresh_cache-31998a62-70f5-4205-89b9-df8312916126" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1712.838264] env[63241]: DEBUG nova.compute.manager [req-293ac4a9-cf75-432a-b17b-ce3166aca998 req-897ca343-5364-43c0-bbb2-2551cbc2ea59 service nova] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Received event network-vif-deleted-0d383637-3a9e-4430-80c1-4b6b738e5817 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1712.939836] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.940194] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.940441] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.940703] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.940933] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.945245] env[63241]: INFO nova.compute.manager [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Terminating instance [ 1712.948108] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1712.952164] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3946a0de-844e-4470-8bd5-9007bcde7dbf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.954317] env[63241]: DEBUG nova.compute.manager [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1712.954635] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1712.955530] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cd49ce-30ba-4a79-9102-a1825c4d159b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.964847] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820754, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.970511] env[63241]: DEBUG oslo_vmware.api [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1712.970511] env[63241]: value = "task-1820755" [ 1712.970511] env[63241]: _type = "Task" [ 1712.970511] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.970511] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1712.970511] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1d33475-b77f-4090-8fab-2bdcf8c70ee8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.983352] env[63241]: DEBUG oslo_vmware.api [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820755, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.984745] env[63241]: DEBUG oslo_vmware.api [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1712.984745] env[63241]: value = "task-1820756" [ 1712.984745] env[63241]: _type = "Task" [ 1712.984745] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.993685] env[63241]: DEBUG oslo_vmware.api [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820756, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.034832] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5278b5e3-e5a5-7969-e214-aca41adfcc0c, 'name': SearchDatastore_Task, 'duration_secs': 0.01424} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.034832] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1713.035261] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1713.035674] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.035806] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.035921] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1713.036248] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15f550fe-9aad-4253-8b22-1219bfe87d5a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.046397] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1713.046397] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1713.047237] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c6a2949-f7c9-46fe-a0ee-843713da02a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.057821] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1713.057821] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523f4b90-4f10-8abb-991f-080b59a7db60" [ 1713.057821] env[63241]: _type = "Task" [ 1713.057821] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.064731] env[63241]: INFO nova.compute.manager [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Took 34.76 seconds to build instance. [ 1713.072496] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523f4b90-4f10-8abb-991f-080b59a7db60, 'name': SearchDatastore_Task, 'duration_secs': 0.012835} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.073884] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23a142e5-43a7-4c76-b878-ab3213813b60 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.080698] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1713.080698] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527c173b-9417-1849-516e-e235576b6a16" [ 1713.080698] env[63241]: _type = "Task" [ 1713.080698] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.090725] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527c173b-9417-1849-516e-e235576b6a16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.233471] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Applying migration context for instance 7f1710d0-857d-41fc-8151-8c5e129dda08 as it has an incoming, in-progress migration a30c871a-6336-4949-86a2-c2009c56c7b0. 
Migration status is reverting {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1713.236279] env[63241]: INFO nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating resource usage from migration a30c871a-6336-4949-86a2-c2009c56c7b0 [ 1713.255899] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 94a604da-ad3d-415a-aa92-d648e3da803d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.256058] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 72a11582-1fad-428a-bde1-e9d0b05731cd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1713.256058] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 9361ee6a-7c4d-4409-bc3c-7da7d4550d97 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.256191] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance c7b034f7-1d7f-4782-9ecb-5987c35339cc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1713.256302] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e3842404-2c80-4fa9-b0c9-c58c484845a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.256421] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.256537] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 31e84206-e583-4610-969e-2ccae2d0b206 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.256650] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 9d301157-6870-4452-9ae6-0d45c4338886 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.256763] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance fb5d60fa-fa13-44a1-8291-4645761a0c80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.256875] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 14af9f82-525e-453c-8dc5-ef5b13c67ee4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.256984] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 29b6caa8-a07c-494b-b776-b08affa45c87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.257108] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.257234] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 1dc98fbd-a52b-42fa-8d37-d14318dbc941 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1713.257349] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance cb7eb689-b8f6-479d-aa6b-c27fab16e131 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.257463] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0e4a3b3a-4464-404f-9154-1ab6f97ae951 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.257627] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance c8f1ce16-70b7-41fd-8516-63198139c1cc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1713.257707] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 1e172f73-972e-4401-b358-512f7e03b27f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.257807] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance f372d405-f7d5-4e5f-8c36-fe9651af2a0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.258259] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 73ea6bff-60da-4691-a569-f4e9ae92f701 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.258259] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance a77f7227-0285-48b8-bb3b-f5cfe7ad4646 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.258259] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.258259] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Migration a30c871a-6336-4949-86a2-c2009c56c7b0 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1713.258687] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 7f1710d0-857d-41fc-8151-8c5e129dda08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.258687] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 381bba62-49a7-4d6f-b12a-741f5d884fe5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.258687] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance f65e5b00-38b5-4453-b370-1f56f18053eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.258687] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 31998a62-70f5-4205-89b9-df8312916126 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.258844] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 01af6dc5-e0e7-4f8b-ad07-73af80c32577 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1713.258940] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 23 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1713.259083] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5184MB phys_disk=200GB used_disk=22GB total_vcpus=48 used_vcpus=23 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1713.297521] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4af28c1f-6cdd-48b4-bc9d-3c1e3f7b3d25 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 27.592s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.340882] env[63241]: DEBUG nova.compute.manager [req-7986b33b-ca7c-4576-8b3c-9afb77dcb8d4 req-e7e44320-750f-4626-ae82-9ada86f036aa service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received event network-vif-plugged-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1713.341152] env[63241]: DEBUG oslo_concurrency.lockutils [req-7986b33b-ca7c-4576-8b3c-9afb77dcb8d4 req-e7e44320-750f-4626-ae82-9ada86f036aa service nova] Acquiring lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.341324] env[63241]: DEBUG oslo_concurrency.lockutils [req-7986b33b-ca7c-4576-8b3c-9afb77dcb8d4 req-e7e44320-750f-4626-ae82-9ada86f036aa service nova] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.341491] env[63241]: DEBUG oslo_concurrency.lockutils [req-7986b33b-ca7c-4576-8b3c-9afb77dcb8d4 req-e7e44320-750f-4626-ae82-9ada86f036aa service nova] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.341659] env[63241]: DEBUG nova.compute.manager [req-7986b33b-ca7c-4576-8b3c-9afb77dcb8d4 req-e7e44320-750f-4626-ae82-9ada86f036aa service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] No waiting events found dispatching network-vif-plugged-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1713.341869] env[63241]: WARNING nova.compute.manager [req-7986b33b-ca7c-4576-8b3c-9afb77dcb8d4 req-e7e44320-750f-4626-ae82-9ada86f036aa service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received unexpected event network-vif-plugged-7a0be842-edfe-48ff-9275-dbb260c7e781 for instance with vm_state building and task_state spawning. [ 1713.347154] env[63241]: DEBUG nova.network.neutron [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Successfully updated port: 7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1713.360283] env[63241]: DEBUG nova.network.neutron [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Port 56f1b482-fc2c-45e5-9aca-99ff209a166e binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1713.360546] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.360694] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.360858] env[63241]: DEBUG nova.network.neutron [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1713.459469] env[63241]: DEBUG oslo_vmware.api [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820754, 'name': PowerOnVM_Task, 'duration_secs': 0.619007} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.459816] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1713.459974] env[63241]: INFO nova.compute.manager [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Took 9.95 seconds to spawn the instance on the hypervisor. [ 1713.460154] env[63241]: DEBUG nova.compute.manager [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1713.461028] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdea2dc0-52a3-47fe-952f-0fe543c3b9a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.481615] env[63241]: DEBUG oslo_vmware.api [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820755, 'name': PowerOffVM_Task, 'duration_secs': 0.274588} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.481890] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1713.482075] env[63241]: DEBUG nova.compute.manager [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1713.482880] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d982f232-da3f-4b13-af5b-fc121dc09e14 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.496214] env[63241]: DEBUG oslo_vmware.api [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820756, 'name': PowerOffVM_Task, 'duration_secs': 0.276087} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.498054] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1713.498296] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1713.502939] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcc2704f-1122-4244-8e92-5afdf29976d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.567808] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8830816a-bba4-482b-b3f2-3163904997cb tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "381bba62-49a7-4d6f-b12a-741f5d884fe5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.272s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.592705] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527c173b-9417-1849-516e-e235576b6a16, 'name': SearchDatastore_Task, 'duration_secs': 0.019029} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.594750] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1713.595019] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 31998a62-70f5-4205-89b9-df8312916126/31998a62-70f5-4205-89b9-df8312916126.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1713.595567] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ca1b980-b54f-4315-95a9-3b142e6176ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.603060] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1713.603060] env[63241]: value = "task-1820758" [ 1713.603060] env[63241]: _type = "Task" [ 1713.603060] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.615408] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820758, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.650965] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1713.652114] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1713.652379] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Deleting the datastore file [datastore1] a77f7227-0285-48b8-bb3b-f5cfe7ad4646 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1713.652699] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44faa865-39be-487e-a012-f5239fa27c40 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.657799] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa41cde2-00a6-43d5-928c-a94d8ffbecaf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.661358] env[63241]: DEBUG oslo_vmware.api [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1713.661358] env[63241]: value = "task-1820759" [ 1713.661358] env[63241]: _type = "Task" [ 1713.661358] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.668102] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a243fccb-8c70-4c11-bf62-19bb536a1258 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.674921] env[63241]: DEBUG oslo_vmware.api [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820759, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.705577] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88636e9b-aa55-4607-a17a-0bde24854c6c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.714396] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3473f4dd-007f-49fb-87ef-f450b8b5ddb7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.728473] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1713.851580] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.851580] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.851580] env[63241]: DEBUG nova.network.neutron [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1713.983095] env[63241]: INFO nova.compute.manager [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Took 31.46 seconds to build instance. [ 1714.015026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-add4e7ea-94ad-41cb-be3f-276c8ae38ecd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.588s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.121271] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820758, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.175017] env[63241]: DEBUG oslo_vmware.api [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310046} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.175425] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1714.175649] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1714.176211] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1714.176561] env[63241]: INFO nova.compute.manager [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1714.176873] env[63241]: DEBUG oslo.service.loopingcall [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.177104] env[63241]: DEBUG nova.compute.manager [-] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1714.177203] env[63241]: DEBUG nova.network.neutron [-] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1714.232231] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1714.421571] env[63241]: DEBUG nova.network.neutron [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1714.487872] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b297aa75-e123-4a87-9a7c-e16b22998a61 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "f65e5b00-38b5-4453-b370-1f56f18053eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.971s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.516683] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "381bba62-49a7-4d6f-b12a-741f5d884fe5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.516962] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "381bba62-49a7-4d6f-b12a-741f5d884fe5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.517193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "381bba62-49a7-4d6f-b12a-741f5d884fe5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.517384] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "381bba62-49a7-4d6f-b12a-741f5d884fe5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.517554] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "381bba62-49a7-4d6f-b12a-741f5d884fe5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.520297] env[63241]: INFO nova.compute.manager [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Terminating instance [ 1714.524220] env[63241]: DEBUG nova.compute.manager [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1714.524426] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1714.525337] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4478aafb-0a9c-42e0-828a-21290490f162 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.534298] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1714.534560] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bf9209d-5f8a-459b-8d7e-3d5195242ca3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.540696] env[63241]: DEBUG oslo_vmware.api [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1714.540696] env[63241]: value = "task-1820760" [ 1714.540696] env[63241]: _type = "Task" [ 1714.540696] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.541621] env[63241]: DEBUG nova.network.neutron [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance_info_cache with network_info: [{"id": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "address": "fa:16:3e:72:df:ac", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f1b482-fc", "ovs_interfaceid": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.555632] env[63241]: DEBUG oslo_vmware.api [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820760, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.614033] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820758, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655584} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.614445] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 31998a62-70f5-4205-89b9-df8312916126/31998a62-70f5-4205-89b9-df8312916126.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1714.614725] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1714.615131] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f67a2ea2-a897-436d-b36c-a6463a3bb3d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.622986] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1714.622986] env[63241]: value = "task-1820761" [ 1714.622986] env[63241]: _type = "Task" [ 1714.622986] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.631906] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820761, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.649127] env[63241]: DEBUG nova.network.neutron [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating instance_info_cache with network_info: [{"id": "7a0be842-edfe-48ff-9275-dbb260c7e781", "address": "fa:16:3e:aa:cc:cf", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0be842-ed", "ovs_interfaceid": "7a0be842-edfe-48ff-9275-dbb260c7e781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.740605] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1714.741336] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.528s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.741705] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.330s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.742126] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.745244] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: 
waited 14.693s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.745705] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.749497] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.926s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.749497] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.752365] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.757s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.752617] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.756434] env[63241]: DEBUG oslo_concurrency.lockutils [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.956s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.756590] env[63241]: DEBUG nova.objects.instance [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lazy-loading 'resources' on Instance uuid 9361ee6a-7c4d-4409-bc3c-7da7d4550d97 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1714.757858] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.758078] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1714.792767] 
env[63241]: INFO nova.scheduler.client.report [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Deleted allocations for instance 72a11582-1fad-428a-bde1-e9d0b05731cd [ 1714.808439] env[63241]: INFO nova.scheduler.client.report [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Deleted allocations for instance c8f1ce16-70b7-41fd-8516-63198139c1cc [ 1714.818788] env[63241]: INFO nova.scheduler.client.report [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Deleted allocations for instance 1dc98fbd-a52b-42fa-8d37-d14318dbc941 [ 1714.841129] env[63241]: INFO nova.scheduler.client.report [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Deleted allocations for instance c7b034f7-1d7f-4782-9ecb-5987c35339cc [ 1715.051104] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.060326] env[63241]: DEBUG oslo_vmware.api [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820760, 'name': PowerOffVM_Task, 'duration_secs': 0.478947} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.061218] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1715.061592] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1715.062601] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-033a28a9-5656-43b8-b2e2-21796c37bd34 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.137016] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820761, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.337897} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.137879] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1715.138900] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb291714-51e1-45d6-92a4-96e825ef0828 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.160476] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.160950] env[63241]: DEBUG nova.compute.manager [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Instance network_info: |[{"id": "7a0be842-edfe-48ff-9275-dbb260c7e781", "address": "fa:16:3e:aa:cc:cf", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0be842-ed", "ovs_interfaceid": "7a0be842-edfe-48ff-9275-dbb260c7e781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1715.172217] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 31998a62-70f5-4205-89b9-df8312916126/31998a62-70f5-4205-89b9-df8312916126.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1715.172822] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:cc:cf', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a0be842-edfe-48ff-9275-dbb260c7e781', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1715.181984] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating folder: Project (5d257d51a2254f5386fd3348602e5b71). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1715.182831] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5bfb976-fbc7-4145-ae96-7fa2b09dde39 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.202275] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3dce9ff-09e7-458a-8563-f45e45602676 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.209152] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1715.209152] env[63241]: value = "task-1820764" [ 1715.209152] env[63241]: _type = "Task" [ 1715.209152] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.215932] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Created folder: Project (5d257d51a2254f5386fd3348602e5b71) in parent group-v376927. [ 1715.215932] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating folder: Instances. Parent ref: group-v377151. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1715.215932] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6a7f741-77f3-4506-a5eb-c848c1c26cc3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.221236] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820764, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.231148] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Created folder: Instances in parent group-v377151. [ 1715.231463] env[63241]: DEBUG oslo.service.loopingcall [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1715.231656] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1715.231921] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f64fd17-b8b5-4e40-9311-302c474899a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.279586] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] There are 56 instances to clean {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1715.279586] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 4a57d04b-72a0-4db3-8119-994b67e4b096] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1715.287184] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1715.287184] env[63241]: value = "task-1820766" [ 1715.287184] env[63241]: _type = "Task" [ 1715.287184] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.299951] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820766, 'name': CreateVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.311093] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f5efb847-f06c-47e2-b7ac-5d44ca18358c tempest-VolumesAssistedSnapshotsTest-265288555 tempest-VolumesAssistedSnapshotsTest-265288555-project-member] Lock "72a11582-1fad-428a-bde1-e9d0b05731cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.469s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.318937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c8388a11-ce79-441b-8dbd-59263e988c66 tempest-ImagesTestJSON-2007080518 tempest-ImagesTestJSON-2007080518-project-member] Lock "c8f1ce16-70b7-41fd-8516-63198139c1cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.293s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.327753] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eff49e88-9775-4ab5-9925-ce1ac377db89 tempest-ServerShowV247Test-688784001 tempest-ServerShowV247Test-688784001-project-member] Lock "1dc98fbd-a52b-42fa-8d37-d14318dbc941" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.842s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.347416] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b525559d-c087-4785-9856-e4c4ad42f785 tempest-ServersWithSpecificFlavorTestJSON-693138178 tempest-ServersWithSpecificFlavorTestJSON-693138178-project-member] Lock "c7b034f7-1d7f-4782-9ecb-5987c35339cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.529s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.368852] env[63241]: DEBUG 
nova.network.neutron [-] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.496649] env[63241]: DEBUG nova.compute.manager [req-f867ab8e-9bb1-49a7-96b8-bcd79f7f74ba req-0371e1a0-693a-426f-9ece-1efe70d131da service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received event network-changed-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1715.496850] env[63241]: DEBUG nova.compute.manager [req-f867ab8e-9bb1-49a7-96b8-bcd79f7f74ba req-0371e1a0-693a-426f-9ece-1efe70d131da service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Refreshing instance network info cache due to event network-changed-7a0be842-edfe-48ff-9275-dbb260c7e781. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1715.497227] env[63241]: DEBUG oslo_concurrency.lockutils [req-f867ab8e-9bb1-49a7-96b8-bcd79f7f74ba req-0371e1a0-693a-426f-9ece-1efe70d131da service nova] Acquiring lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.497443] env[63241]: DEBUG oslo_concurrency.lockutils [req-f867ab8e-9bb1-49a7-96b8-bcd79f7f74ba req-0371e1a0-693a-426f-9ece-1efe70d131da service nova] Acquired lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.497588] env[63241]: DEBUG nova.network.neutron [req-f867ab8e-9bb1-49a7-96b8-bcd79f7f74ba req-0371e1a0-693a-426f-9ece-1efe70d131da service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Refreshing network info cache for port 7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1715.555414] env[63241]: DEBUG nova.compute.manager [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63241) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1715.555658] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.609141] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.609329] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.609537] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.609721] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.609887] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.612949] env[63241]: INFO nova.compute.manager [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Terminating instance [ 1715.615307] env[63241]: DEBUG nova.compute.manager [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1715.615615] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1715.616451] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2773fe76-0837-4fe6-99b9-f5d9d1d9b79f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.626746] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1715.627022] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d85e9e7-79c0-47ed-af5e-338966de709b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.660451] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49074ff3-246e-4d3f-9e8e-81002c4b7972 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.668521] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed31041-2269-4e20-b708-c587e6d4e543 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.701869] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac5830a-e21d-42af-87e5-078445d48ab1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.711058] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e7c3c5-bb97-4452-af30-6991206728c1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.716407] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1715.716407] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1715.716407] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleting the datastore file [datastore1] 381bba62-49a7-4d6f-b12a-741f5d884fe5 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1715.720359] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09e13fca-1395-43de-891d-3bc1c95150f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.732716] env[63241]: DEBUG nova.compute.provider_tree [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1715.738335] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820764, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.739026] env[63241]: DEBUG oslo_vmware.api [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1715.739026] env[63241]: value = "task-1820768" [ 1715.739026] env[63241]: _type = "Task" [ 1715.739026] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.741021] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1715.741021] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1715.741021] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleting the datastore file [datastore1] 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1715.744360] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd614306-d4d8-4941-885c-6466c8dfd5bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.753145] env[63241]: DEBUG oslo_vmware.api [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820768, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.754057] env[63241]: DEBUG oslo_vmware.api [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1715.754057] env[63241]: value = "task-1820769" [ 1715.754057] env[63241]: _type = "Task" [ 1715.754057] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.758533] env[63241]: INFO nova.compute.manager [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Rescuing [ 1715.758804] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.759016] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquired lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.759214] env[63241]: DEBUG nova.network.neutron [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1715.766595] env[63241]: DEBUG oslo_vmware.api [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820769, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.790039] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: effc3987-45d0-4305-83a2-0eba47d2c7fd] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1715.805488] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820766, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.872271] env[63241]: INFO nova.compute.manager [-] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Took 1.69 seconds to deallocate network for instance. [ 1716.227595] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820764, 'name': ReconfigVM_Task, 'duration_secs': 0.665335} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.228272] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 31998a62-70f5-4205-89b9-df8312916126/31998a62-70f5-4205-89b9-df8312916126.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1716.229041] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edf20bde-86e3-48ac-8a3d-ee8bbab04af1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.235914] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1716.235914] env[63241]: value = "task-1820770" [ 1716.235914] env[63241]: _type = "Task" [ 1716.235914] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.239815] env[63241]: DEBUG nova.scheduler.client.report [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1716.253244] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820770, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.261525] env[63241]: DEBUG oslo_vmware.api [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820768, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325077} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.262155] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1716.262368] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1716.262534] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1716.262702] env[63241]: INFO nova.compute.manager [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Took 1.74 seconds to destroy the instance on the hypervisor. [ 1716.262967] env[63241]: DEBUG oslo.service.loopingcall [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.263428] env[63241]: DEBUG nova.compute.manager [-] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1716.263532] env[63241]: DEBUG nova.network.neutron [-] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1716.270256] env[63241]: DEBUG oslo_vmware.api [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820769, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291647} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.270791] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1716.270973] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1716.271330] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1716.271330] env[63241]: INFO nova.compute.manager [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Took 0.66 seconds to destroy the instance on the hypervisor. [ 1716.271545] env[63241]: DEBUG oslo.service.loopingcall [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.271995] env[63241]: DEBUG nova.compute.manager [-] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1716.272107] env[63241]: DEBUG nova.network.neutron [-] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1716.291719] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 20c7a1a1-4396-414f-a52c-06551722b6eb] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1716.305832] env[63241]: DEBUG oslo_concurrency.lockutils [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.306053] env[63241]: DEBUG oslo_concurrency.lockutils [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.306427] env[63241]: DEBUG oslo_concurrency.lockutils [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.306689] env[63241]: DEBUG oslo_concurrency.lockutils [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.306894] env[63241]: DEBUG oslo_concurrency.lockutils [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.324401] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820766, 'name': CreateVM_Task, 'duration_secs': 0.669489} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.324401] env[63241]: INFO nova.compute.manager [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Terminating instance [ 1716.325028] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1716.325691] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.325861] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.326238] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1716.326780] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a651bbca-2566-4145-87b3-681b573639aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.331630] env[63241]: DEBUG nova.compute.manager [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1716.331844] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1716.334216] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e400c5-9914-4391-8f3c-0e44ad1b9d4c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.337532] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1716.337532] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5263611a-91ac-d668-0e7a-c69e6e883fbd" [ 1716.337532] env[63241]: _type = "Task" [ 1716.337532] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.343825] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1716.344380] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6727f90d-3519-4b66-9bee-6eb2cd5b780d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.349819] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5263611a-91ac-d668-0e7a-c69e6e883fbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.353656] env[63241]: DEBUG oslo_vmware.api [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1716.353656] env[63241]: value = "task-1820771" [ 1716.353656] env[63241]: _type = "Task" [ 1716.353656] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.374681] env[63241]: DEBUG oslo_vmware.api [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820771, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.383994] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.539691] env[63241]: DEBUG nova.network.neutron [req-f867ab8e-9bb1-49a7-96b8-bcd79f7f74ba req-0371e1a0-693a-426f-9ece-1efe70d131da service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updated VIF entry in instance network info cache for port 7a0be842-edfe-48ff-9275-dbb260c7e781. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1716.540243] env[63241]: DEBUG nova.network.neutron [req-f867ab8e-9bb1-49a7-96b8-bcd79f7f74ba req-0371e1a0-693a-426f-9ece-1efe70d131da service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating instance_info_cache with network_info: [{"id": "7a0be842-edfe-48ff-9275-dbb260c7e781", "address": "fa:16:3e:aa:cc:cf", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0be842-ed", "ovs_interfaceid": "7a0be842-edfe-48ff-9275-dbb260c7e781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.741734] env[63241]: DEBUG nova.network.neutron [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updating instance_info_cache with network_info: [{"id": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "address": "fa:16:3e:d2:d7:ea", "network": {"id": "d35148d1-0dba-4e39-9bc1-ad561f93e9f8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1579993511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e7a8a40a56ee42dca4190ac78e5f22ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", 
"external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac5cae6-11", "ovs_interfaceid": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.748511] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820770, 'name': Rename_Task, 'duration_secs': 0.283504} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.748785] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1716.749079] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27012d8a-8a24-4da4-8e80-d9ea97ef2f57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.753851] env[63241]: DEBUG oslo_concurrency.lockutils [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.999s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.757509] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 1.202s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.758889] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1716.758889] env[63241]: value = "task-1820772" [ 1716.758889] env[63241]: _type = "Task" [ 1716.758889] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.770198] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820772, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.772844] env[63241]: INFO nova.scheduler.client.report [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Deleted allocations for instance 9361ee6a-7c4d-4409-bc3c-7da7d4550d97 [ 1716.804376] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: f583adda-976e-4f79-adc7-0b4e1a73ad73] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1716.849833] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5263611a-91ac-d668-0e7a-c69e6e883fbd, 'name': SearchDatastore_Task, 'duration_secs': 0.034246} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.849955] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.850389] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1716.850633] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.850786] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.851498] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1716.851865] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d61a2a0d-a420-4628-a4fa-abc02eccfb3c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.861990] env[63241]: DEBUG oslo_vmware.api [None 
req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820771, 'name': PowerOffVM_Task, 'duration_secs': 0.241736} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.863601] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1716.863800] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1716.864128] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1716.864318] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1716.865710] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f300e88-fee7-4d00-adc9-476a5365e10e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.867052] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9fe1976-c5c2-45f7-85b1-e2693bb7152a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.872433] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1716.872433] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5226a1dd-3ee6-c612-0e12-a4f08f2e3846" [ 1716.872433] env[63241]: _type = "Task" [ 1716.872433] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.880702] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5226a1dd-3ee6-c612-0e12-a4f08f2e3846, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.887358] env[63241]: DEBUG nova.compute.manager [req-f171cd49-f2d2-483f-b93c-89aeb8c283af req-9b8f898b-3cb2-4306-8795-e66b34c03e83 service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Received event network-vif-deleted-92b2ae26-3653-4737-891e-09a99ee68a10 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1716.887570] env[63241]: INFO nova.compute.manager [req-f171cd49-f2d2-483f-b93c-89aeb8c283af req-9b8f898b-3cb2-4306-8795-e66b34c03e83 service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Neutron deleted interface 92b2ae26-3653-4737-891e-09a99ee68a10; detaching it from the instance and deleting it from the info cache [ 1716.887779] env[63241]: DEBUG nova.network.neutron [req-f171cd49-f2d2-483f-b93c-89aeb8c283af req-9b8f898b-3cb2-4306-8795-e66b34c03e83 service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.034690] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "864175e0-33f0-429f-bdf6-722d9b00da2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.035053] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.046415] env[63241]: DEBUG oslo_concurrency.lockutils [req-f867ab8e-9bb1-49a7-96b8-bcd79f7f74ba req-0371e1a0-693a-426f-9ece-1efe70d131da service nova] Releasing lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.046415] env[63241]: DEBUG nova.compute.manager [req-f867ab8e-9bb1-49a7-96b8-bcd79f7f74ba req-0371e1a0-693a-426f-9ece-1efe70d131da service nova] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Received event network-vif-deleted-eaf7faa9-8f94-4c74-9c0d-96c349efc7d7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1717.046415] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1717.046415] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1717.046415] 
env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleting the datastore file [datastore1] cb7eb689-b8f6-479d-aa6b-c27fab16e131 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1717.046415] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b69778f2-8e36-4f60-9cc7-488636143c53 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.055336] env[63241]: DEBUG oslo_vmware.api [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1717.055336] env[63241]: value = "task-1820774" [ 1717.055336] env[63241]: _type = "Task" [ 1717.055336] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.065099] env[63241]: DEBUG oslo_vmware.api [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820774, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.244878] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Releasing lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.261710] env[63241]: DEBUG nova.objects.instance [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lazy-loading 'migration_context' on Instance uuid 7f1710d0-857d-41fc-8151-8c5e129dda08 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1717.273163] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820772, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.281505] env[63241]: DEBUG oslo_concurrency.lockutils [None req-83e37419-b4e5-4364-ae48-e2630a76101a tempest-ServerActionsV293TestJSON-71039187 tempest-ServerActionsV293TestJSON-71039187-project-member] Lock "9361ee6a-7c4d-4409-bc3c-7da7d4550d97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.570s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.309152] env[63241]: DEBUG nova.network.neutron [-] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.310385] env[63241]: DEBUG nova.network.neutron [-] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.312826] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: cfdc6b34-6940-414f-b17d-6fe17f92474a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1717.391978] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5226a1dd-3ee6-c612-0e12-a4f08f2e3846, 'name': SearchDatastore_Task, 'duration_secs': 0.026577} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.392329] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c170774a-a2bc-4d76-adb3-756a12677aee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.395313] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a3a6451-2c3a-4714-8dcc-b6d43f487647 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.402523] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1717.402523] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5231314b-38a1-d06a-21e1-6dc41c75d9a6" [ 1717.402523] env[63241]: _type = "Task" [ 1717.402523] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.409799] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb80029c-b630-4e22-9361-7324f3759453 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.429475] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5231314b-38a1-d06a-21e1-6dc41c75d9a6, 'name': SearchDatastore_Task, 'duration_secs': 0.013681} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.429856] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.430192] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577/01af6dc5-e0e7-4f8b-ad07-73af80c32577.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1717.430533] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-592756b7-7002-4143-a45f-a19e0ed851a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.450870] env[63241]: DEBUG nova.compute.manager [req-f171cd49-f2d2-483f-b93c-89aeb8c283af req-9b8f898b-3cb2-4306-8795-e66b34c03e83 service nova] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Detach interface failed, port_id=92b2ae26-3653-4737-891e-09a99ee68a10, reason: Instance 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1717.451172] env[63241]: DEBUG nova.compute.manager [req-f171cd49-f2d2-483f-b93c-89aeb8c283af req-9b8f898b-3cb2-4306-8795-e66b34c03e83 service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Received event network-vif-deleted-43c2edfc-733f-41ab-8cd3-c132dd83c038 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1717.451487] env[63241]: INFO nova.compute.manager [req-f171cd49-f2d2-483f-b93c-89aeb8c283af req-9b8f898b-3cb2-4306-8795-e66b34c03e83 service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Neutron deleted interface 43c2edfc-733f-41ab-8cd3-c132dd83c038; detaching it from the instance and deleting it from the info cache [ 1717.451854] env[63241]: DEBUG nova.network.neutron [req-f171cd49-f2d2-483f-b93c-89aeb8c283af req-9b8f898b-3cb2-4306-8795-e66b34c03e83 service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.459864] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1717.459864] env[63241]: value = "task-1820775" [ 1717.459864] env[63241]: _type = "Task" [ 1717.459864] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.469344] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820775, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.538051] env[63241]: DEBUG nova.compute.manager [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1717.569709] env[63241]: DEBUG oslo_vmware.api [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314775} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.571497] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1717.571497] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1717.571497] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1717.571497] env[63241]: INFO nova.compute.manager [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1717.571497] env[63241]: DEBUG oslo.service.loopingcall [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1717.571497] env[63241]: DEBUG nova.compute.manager [-] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1717.571827] env[63241]: DEBUG nova.network.neutron [-] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1717.781801] env[63241]: DEBUG oslo_vmware.api [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820772, 'name': PowerOnVM_Task, 'duration_secs': 0.787953} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.782245] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1717.782595] env[63241]: INFO nova.compute.manager [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Took 8.88 seconds to spawn the instance on the hypervisor. 
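The ReconfigVM_Task, Rename_Task, DeleteDatastoreFile_Task and PowerOnVM_Task entries above all follow the same shape: the driver submits a vCenter task, gets back a handle ("Waiting for the task: (returnval){ value = "task-18207xx" ... }", wait_for_task at oslo_vmware/api.py:397), and the task is polled until it reports success ("Task: {...} progress is N%" from _poll_task at api.py:434, then "completed successfully" at api.py:444). Below is a minimal sketch of that polling loop, not the oslo.vmware implementation; session.get_task_info() is a hypothetical helper standing in for the PropertyCollector call that fetches the task's TaskInfo.

import logging
import time

LOG = logging.getLogger(__name__)
POLL_INTERVAL = 0.5  # seconds between polls


class TaskFailedError(Exception):
    """Raised when the vCenter task finishes in the 'error' state."""


def wait_for_vcenter_task(session, task_ref):
    """Poll a vCenter task until it leaves the queued/running states.

    session.get_task_info(task_ref) is a hypothetical wrapper around the
    PropertyCollector call returning the task's TaskInfo; the DEBUG messages
    mirror the 'progress is N%' / 'completed successfully' lines in the log.
    """
    while True:
        info = session.get_task_info(task_ref)  # hypothetical wrapper
        if info.state in ('queued', 'running'):
            LOG.debug("Task: {'id': %s, 'name': %s} progress is %s%%.",
                      info.key, info.name, info.progress or 0)
        elif info.state == 'success':
            LOG.debug("Task: {'id': %s, 'name': %s} completed successfully.",
                      info.key, info.name)
            return info.result
        else:
            raise TaskFailedError(info.error.localizedMessage)
        time.sleep(POLL_INTERVAL)

The real wait_for_task drives the same polling from oslo.service's looping-call machinery (the loopingcall.py that also appears above for _deallocate_network_with_retries) rather than a bare sleep loop, but the state machine is the same: queued/running, then success or error.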
[ 1717.782804] env[63241]: DEBUG nova.compute.manager [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1717.784037] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0cbf11-8e36-40a3-a59f-b9e204f74ccf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.810592] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1717.814031] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-774c4e8f-a98a-4f6f-af5a-b520723309ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.816625] env[63241]: INFO nova.compute.manager [-] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Took 1.54 seconds to deallocate network for instance. [ 1717.816868] env[63241]: INFO nova.compute.manager [-] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Took 1.55 seconds to deallocate network for instance. [ 1717.817245] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: ef36a081-6273-4397-b48f-c2bd03d0a865] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1717.826742] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1717.826742] env[63241]: value = "task-1820776" [ 1717.826742] env[63241]: _type = "Task" [ 1717.826742] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.847555] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.957403] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1278b3dc-6b1e-42ce-a6d4-72ec71d409ad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.979201] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820775, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.982532] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20428872-c675-41cc-b930-4731004a86ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.026851] env[63241]: DEBUG nova.compute.manager [req-f171cd49-f2d2-483f-b93c-89aeb8c283af req-9b8f898b-3cb2-4306-8795-e66b34c03e83 service nova] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Detach interface failed, port_id=43c2edfc-733f-41ab-8cd3-c132dd83c038, reason: Instance 381bba62-49a7-4d6f-b12a-741f5d884fe5 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1718.065571] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.254885] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7024ec-7449-418e-8a4a-ae0ed47dda69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.265828] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6d9a3c-b942-4770-9ca6-a862489b6ff9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.307281] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0db4e96-ec07-4c98-8ece-27e97d7d14ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.317209] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a5b879-623a-4720-b297-d1c89928036b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.326178] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 9e6ca606-383d-42f0-aea4-edecde33c1a4] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1718.328229] env[63241]: INFO nova.compute.manager [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Took 31.94 seconds to build instance. 
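Almost every lock line above ("Acquiring lock ... by ...", "acquired ... :: waited 0.000s", ""released" ... :: held N s") is emitted by oslo.concurrency's lockutils: the "by <function>" form comes from the synchronized decorator's inner wrapper (lockutils.py:402/407/421), while the "refresh_cache-..." and devstack-image-cache_base lines without "by" come from the bare lock() context manager (lockutils.py:310/313/331). A minimal sketch of both forms follows, assuming a throwaway critical section; the lock names merely echo the compute_resources semaphore and per-instance UUID locks in the log.

import logging

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)  # make lockutils' DEBUG lines visible

COMPUTE_RESOURCES_SEMAPHORE = "compute_resources"


@lockutils.synchronized(COMPUTE_RESOURCES_SEMAPHORE)
def update_usage():
    """Runs with 'compute_resources' held; entering and leaving produce the
    'acquired ... waited' / 'released ... held' lines seen above."""
    pass


def terminate_instance(instance_uuid):
    # Bare context-manager form (the style of the refresh_cache-* and
    # image-cache locks above), here simply reusing an instance UUID as the
    # lock name for illustration.
    with lockutils.lock(instance_uuid):
        update_usage()


if __name__ == "__main__":
    terminate_instance("44ddb1f0-fd5c-4c9e-baf2-eec09d80f490")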
[ 1718.334458] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.347307] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.347562] env[63241]: DEBUG nova.compute.provider_tree [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1718.358183] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820776, 'name': PowerOffVM_Task, 'duration_secs': 0.211672} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.358183] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1718.358746] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b9dd61-3ab3-4a77-ab4f-0c4cd2971f3a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.379550] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3f2b45-da85-418e-b1e5-dca897a8169d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.437657] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1718.438218] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c52d500d-14b5-4205-852e-dafc7a8d6dcf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.446947] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1718.446947] env[63241]: value = "task-1820777" [ 1718.446947] env[63241]: _type = "Task" [ 1718.446947] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.467066] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1718.467488] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1718.467941] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.468256] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.468589] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1718.469015] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf095c0a-e7d5-474d-b0f0-8d8644939367 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.479104] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820775, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.841482} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.479406] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577/01af6dc5-e0e7-4f8b-ad07-73af80c32577.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1718.481168] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1718.481168] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37f8c4b4-cd41-4821-b747-923e88433253 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.484389] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1718.484567] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1718.485297] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba54f67f-0bb5-4396-8ac2-8b55ddc863db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.490874] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1718.490874] env[63241]: value = "task-1820778" [ 1718.490874] env[63241]: _type = "Task" [ 1718.490874] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.495035] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1718.495035] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524735d1-4bcf-640a-eacd-80b13c39a10f" [ 1718.495035] env[63241]: _type = "Task" [ 1718.495035] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.500018] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820778, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.504923] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524735d1-4bcf-640a-eacd-80b13c39a10f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.830146] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 9b61cee5-65b4-499e-80fd-c6ce6f79dd13] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1718.832442] env[63241]: DEBUG oslo_concurrency.lockutils [None req-86e20cd6-1a13-4a25-8e93-d1c869539809 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "31998a62-70f5-4205-89b9-df8312916126" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.462s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.853865] env[63241]: DEBUG nova.scheduler.client.report [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1718.857721] env[63241]: DEBUG nova.network.neutron [-] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.003403] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067694} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.011426] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1719.012385] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524735d1-4bcf-640a-eacd-80b13c39a10f, 'name': SearchDatastore_Task, 'duration_secs': 0.015041} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.013127] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d43d7ed-0a55-4694-be6b-c73ba0a26cdf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.017438] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bab6c077-6b70-42b3-93f4-76d07df47a0c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.022973] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1719.022973] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c401a3-f752-59eb-2843-e61007660940" [ 1719.022973] env[63241]: _type = "Task" [ 1719.022973] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.043347] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577/01af6dc5-e0e7-4f8b-ad07-73af80c32577.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1719.046958] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ec7b2d4-d7c0-45e3-ab67-c41aa6a216d9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.067181] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c401a3-f752-59eb-2843-e61007660940, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.068589] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1719.068589] env[63241]: value = "task-1820779" [ 1719.068589] env[63241]: _type = "Task" [ 1719.068589] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.076750] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820779, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.233500] env[63241]: DEBUG nova.compute.manager [req-b8ca9c7d-b649-47f2-a033-b68daa17ffa6 req-5850e12a-8154-4799-8554-77f95fc930bf service nova] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Received event network-vif-deleted-5546d295-8d78-4143-b874-e6cc21c5945a {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1719.244131] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f2ea3fe-5aed-4271-a3d1-b57bdb737fdc tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.244571] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f2ea3fe-5aed-4271-a3d1-b57bdb737fdc tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.245605] env[63241]: DEBUG nova.objects.instance [None req-3f2ea3fe-5aed-4271-a3d1-b57bdb737fdc tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'flavor' on Instance uuid 9d301157-6870-4452-9ae6-0d45c4338886 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1719.334484] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: f4eb5e49-bae0-435c-93f0-15d6939f9e7c] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1719.365716] env[63241]: INFO nova.compute.manager [-] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Took 1.79 seconds to deallocate network for instance. [ 1719.544349] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c401a3-f752-59eb-2843-e61007660940, 'name': SearchDatastore_Task, 'duration_secs': 0.066531} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.544876] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.545283] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f65e5b00-38b5-4453-b370-1f56f18053eb/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. {{(pid=63241) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1719.545597] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d61ece42-7aa9-47ae-a2da-23271059b46a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.553098] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1719.553098] env[63241]: value = "task-1820780" [ 1719.553098] env[63241]: _type = "Task" [ 1719.553098] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.561976] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820780, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.581748] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820779, 'name': ReconfigVM_Task, 'duration_secs': 0.424995} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.582092] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577/01af6dc5-e0e7-4f8b-ad07-73af80c32577.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1719.582763] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41eac668-8572-4985-8d7f-609afe64fb32 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.589720] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1719.589720] env[63241]: value = "task-1820781" [ 1719.589720] env[63241]: _type = "Task" [ 1719.589720] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.598789] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820781, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.749661] env[63241]: DEBUG nova.objects.instance [None req-3f2ea3fe-5aed-4271-a3d1-b57bdb737fdc tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'pci_requests' on Instance uuid 9d301157-6870-4452-9ae6-0d45c4338886 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1719.839215] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 34d138e6-90b3-4243-bf45-96ae856cd631] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1719.872323] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.115s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.885440] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.502s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.885719] env[63241]: DEBUG nova.objects.instance [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lazy-loading 'resources' on Instance uuid a77f7227-0285-48b8-bb3b-f5cfe7ad4646 {{(pid=63241) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1719.887504] env[63241]: DEBUG oslo_concurrency.lockutils [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.934966] env[63241]: DEBUG oslo_concurrency.lockutils [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "31998a62-70f5-4205-89b9-df8312916126" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.935076] env[63241]: DEBUG oslo_concurrency.lockutils [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "31998a62-70f5-4205-89b9-df8312916126" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.935262] env[63241]: DEBUG oslo_concurrency.lockutils [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "31998a62-70f5-4205-89b9-df8312916126-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.935472] env[63241]: DEBUG oslo_concurrency.lockutils [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "31998a62-70f5-4205-89b9-df8312916126-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.935682] env[63241]: DEBUG oslo_concurrency.lockutils [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "31998a62-70f5-4205-89b9-df8312916126-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.940042] env[63241]: INFO nova.compute.manager [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Terminating instance [ 1719.945104] env[63241]: DEBUG nova.compute.manager [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1719.945314] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1719.946300] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ff5005-82f5-40bc-8b18-ba2f9c804f8c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.955980] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1719.956286] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-586722ee-ef16-428c-8f61-9081eda21a41 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.963630] env[63241]: DEBUG oslo_vmware.api [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1719.963630] env[63241]: value = "task-1820782" [ 1719.963630] env[63241]: _type = "Task" [ 1719.963630] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.973857] env[63241]: DEBUG oslo_vmware.api [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.068263] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820780, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.103371] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820781, 'name': Rename_Task, 'duration_secs': 0.14336} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.104178] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1720.104178] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c99b08c-7556-426d-894d-5d55d892564a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.113546] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1720.113546] env[63241]: value = "task-1820783" [ 1720.113546] env[63241]: _type = "Task" [ 1720.113546] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.127927] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820783, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.255227] env[63241]: DEBUG nova.objects.base [None req-3f2ea3fe-5aed-4271-a3d1-b57bdb737fdc tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Object Instance<9d301157-6870-4452-9ae6-0d45c4338886> lazy-loaded attributes: flavor,pci_requests {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1720.255227] env[63241]: DEBUG nova.network.neutron [None req-3f2ea3fe-5aed-4271-a3d1-b57bdb737fdc tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1720.345234] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 943100f1-e702-4869-8c19-d81d39712ac5] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1720.452614] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f2ea3fe-5aed-4271-a3d1-b57bdb737fdc tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.207s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.477232] env[63241]: DEBUG oslo_vmware.api [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820782, 'name': PowerOffVM_Task, 'duration_secs': 0.496248} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.477758] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1720.478616] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1720.480466] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e69b87d-acb4-4173-bf5e-491866557a03 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.565153] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820780, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805684} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.566486] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f65e5b00-38b5-4453-b370-1f56f18053eb/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. 
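The recurring "Acquiring lock ... by ...", "acquired ... waited ..." and "released ... held ..." records come from oslo.concurrency's lockutils helpers, which Nova wraps around sections such as the resource tracker's "compute_resources" work. A minimal sketch of the two forms seen in this log, not part of the captured output; the lock name, prefix and functions below are illustrative only:

from oslo_concurrency import lockutils

# Decorator form: serializes callers on the named lock and logs the
# acquired/waited and released/held lines via lockutils' inner() wrapper.
synchronized = lockutils.synchronized_with_prefix('example-')


@synchronized('compute_resources')
def claim(instance_uuid):
    # Critical section: one caller per lock name at a time.
    return 'claimed %s' % instance_uuid


# Context-manager form: produces the Acquiring/Acquired/Releasing lock lines
# seen around the image-cache datastore lock above.
def update_usage(instance_uuid):
    with lockutils.lock('compute_resources'):
        return 'updated %s' % instance_uuid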
[ 1720.566805] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1720.567008] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1720.567207] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleting the datastore file [datastore1] 31998a62-70f5-4205-89b9-df8312916126 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1720.568110] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfc1498-c14a-4820-b86d-262c970a4164 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.570766] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c34186e3-5bac-42b9-8930-0255a3df4d20 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.605785] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] f65e5b00-38b5-4453-b370-1f56f18053eb/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1720.611350] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-562fd249-7711-47ce-89c5-455572774fe9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.631950] env[63241]: DEBUG oslo_vmware.api [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1720.631950] env[63241]: value = "task-1820785" [ 1720.631950] env[63241]: _type = "Task" [ 1720.631950] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.641179] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1720.641179] env[63241]: value = "task-1820786" [ 1720.641179] env[63241]: _type = "Task" [ 1720.641179] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.647451] env[63241]: DEBUG oslo_vmware.api [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820785, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.647709] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820783, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.658162] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820786, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.851081] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 7158c64a-5036-419b-b110-7e22c12bf3dd] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1720.856010] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df5544e-056b-45e3-89c7-5c26d87f4100 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.864237] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246e739d-a9e7-497b-b372-8c767eb801ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.895481] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fa384e-ca43-4286-bbd9-0106358198fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.906325] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40198cbd-9107-443c-b073-ad870e627640 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.924734] env[63241]: DEBUG nova.compute.provider_tree [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.145289] env[63241]: DEBUG oslo_vmware.api [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820783, 'name': PowerOnVM_Task, 'duration_secs': 0.552335} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.148532] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1721.148740] env[63241]: INFO nova.compute.manager [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Took 9.52 seconds to spawn the instance on the hypervisor. [ 1721.148936] env[63241]: DEBUG nova.compute.manager [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1721.149260] env[63241]: DEBUG oslo_vmware.api [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820785, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.27454} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.149939] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d55805-af30-4cd4-8234-e19d5a5d9b9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.154683] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1721.154865] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1721.155082] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1721.155221] env[63241]: INFO nova.compute.manager [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 31998a62-70f5-4205-89b9-df8312916126] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1721.155468] env[63241]: DEBUG oslo.service.loopingcall [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1721.155663] env[63241]: DEBUG nova.compute.manager [-] [instance: 31998a62-70f5-4205-89b9-df8312916126] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1721.155756] env[63241]: DEBUG nova.network.neutron [-] [instance: 31998a62-70f5-4205-89b9-df8312916126] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1721.166039] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820786, 'name': ReconfigVM_Task, 'duration_secs': 0.355161} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.166525] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Reconfigured VM instance instance-00000054 to attach disk [datastore1] f65e5b00-38b5-4453-b370-1f56f18053eb/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1721.167363] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05ee321-8d83-4883-a062-335044c12eab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.195066] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c731202b-fd26-4fdc-bca8-23dfe7a922ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.214989] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1721.214989] env[63241]: value = "task-1820787" [ 1721.214989] env[63241]: _type = "Task" [ 1721.214989] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.225549] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820787, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.354018] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: c3c278a8-0513-4a7f-881e-b71c70206860] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1721.427913] env[63241]: DEBUG nova.scheduler.client.report [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1721.432498] env[63241]: INFO nova.compute.manager [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Swapping old allocation on dict_keys(['9a5e30eb-ceae-4224-aa66-dcbfa98ce24b']) held by migration a30c871a-6336-4949-86a2-c2009c56c7b0 for instance [ 1721.458848] env[63241]: DEBUG nova.scheduler.client.report [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Overwriting current allocation {'allocations': {'9a5e30eb-ceae-4224-aa66-dcbfa98ce24b': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 125}}, 'project_id': '4e6e05a4fd294679b512d6a4dcfebd3f', 'user_id': 'aa971675dc4440df813844c1ed2f2444', 'consumer_generation': 1} on consumer 7f1710d0-857d-41fc-8151-8c5e129dda08 {{(pid=63241) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1721.630620] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.630873] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquired lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.631081] env[63241]: DEBUG nova.network.neutron [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1721.673852] env[63241]: INFO nova.compute.manager [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Took 34.24 seconds to build 
instance. [ 1721.732107] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820787, 'name': ReconfigVM_Task, 'duration_secs': 0.17089} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.732107] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1721.732107] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c2ec0dc-3171-45c3-9f8d-8e35ff2f441c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.738971] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1721.738971] env[63241]: value = "task-1820788" [ 1721.738971] env[63241]: _type = "Task" [ 1721.738971] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.754520] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820788, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.860070] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: a534b054-2143-41c4-a0fa-028339ecdbbf] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1721.936241] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.051s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.938677] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.873s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.941165] env[63241]: INFO nova.compute.claims [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1721.959449] env[63241]: INFO nova.scheduler.client.report [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Deleted allocations for instance a77f7227-0285-48b8-bb3b-f5cfe7ad4646 [ 1722.178424] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a623a993-9356-4c52-b023-57fd5a23a54f tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.752s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.255143] env[63241]: DEBUG oslo_vmware.api [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820788, 'name': PowerOnVM_Task, 'duration_secs': 0.436497} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.255143] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1722.257161] env[63241]: DEBUG nova.compute.manager [None req-eb65cfbb-f843-4742-829b-9d77101edd2b tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1722.258335] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e866ca41-3d85-4d62-9b7a-ecd451f3a1df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.341734] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.341734] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.341734] env[63241]: DEBUG nova.objects.instance [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'flavor' on Instance uuid 9d301157-6870-4452-9ae6-0d45c4338886 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1722.366677] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 3dfeaf57-2244-418e-a04a-ed4143e454d5] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1722.470362] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0bc6f784-577c-4304-a2a9-cfaf3758cdc1 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "a77f7227-0285-48b8-bb3b-f5cfe7ad4646" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.530s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.537152] env[63241]: DEBUG nova.compute.manager [req-ce311242-d2ff-4de1-8fdb-7f217440fb7b req-f4408ebb-ffd5-4d52-87e0-7e8634c9fcdd service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Received event network-vif-deleted-3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1722.537152] env[63241]: INFO 
nova.compute.manager [req-ce311242-d2ff-4de1-8fdb-7f217440fb7b req-f4408ebb-ffd5-4d52-87e0-7e8634c9fcdd service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Neutron deleted interface 3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7; detaching it from the instance and deleting it from the info cache [ 1722.537152] env[63241]: DEBUG nova.network.neutron [req-ce311242-d2ff-4de1-8fdb-7f217440fb7b req-f4408ebb-ffd5-4d52-87e0-7e8634c9fcdd service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.714235] env[63241]: DEBUG nova.network.neutron [-] [instance: 31998a62-70f5-4205-89b9-df8312916126] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.733209] env[63241]: DEBUG nova.network.neutron [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance_info_cache with network_info: [{"id": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "address": "fa:16:3e:72:df:ac", "network": {"id": "3282d58a-d86d-4733-a3df-00f00dfc2299", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.145", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "879e1def15b743fd96e9c706b3cdb82f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56f1b482-fc", "ovs_interfaceid": "56f1b482-fc2c-45e5-9aca-99ff209a166e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.870612] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: eaed706d-b3db-46ed-8c70-08f80479afa4] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1723.043947] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-644eaee2-8833-47a3-813d-44231b5f7c98 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.057291] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6602346d-98fd-427b-82a2-f7bbb57d3d95 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.091117] env[63241]: DEBUG nova.objects.instance [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'pci_requests' on Instance uuid 9d301157-6870-4452-9ae6-0d45c4338886 {{(pid=63241) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1723.118310] env[63241]: DEBUG nova.compute.manager [req-ce311242-d2ff-4de1-8fdb-7f217440fb7b req-f4408ebb-ffd5-4d52-87e0-7e8634c9fcdd service nova] [instance: 31998a62-70f5-4205-89b9-df8312916126] Detach interface failed, port_id=3a7fb03d-c1d2-4e76-a5d7-9f0877d349b7, reason: Instance 31998a62-70f5-4205-89b9-df8312916126 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1723.218328] env[63241]: INFO nova.compute.manager [-] [instance: 31998a62-70f5-4205-89b9-df8312916126] Took 2.06 seconds to deallocate network for instance. [ 1723.238089] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Releasing lock "refresh_cache-7f1710d0-857d-41fc-8151-8c5e129dda08" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.238637] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1723.238889] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d67b06b-ad59-43bb-adbf-15c4ae375e65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.252506] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1723.252506] env[63241]: value = "task-1820789" [ 1723.252506] env[63241]: _type = "Task" [ 1723.252506] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.267448] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.308789] env[63241]: DEBUG nova.compute.manager [req-5695bc80-cf66-4781-ab25-3ad828e39cd0 req-31342de8-a4af-4d46-8a65-7f99939be785 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received event network-changed-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1723.309195] env[63241]: DEBUG nova.compute.manager [req-5695bc80-cf66-4781-ab25-3ad828e39cd0 req-31342de8-a4af-4d46-8a65-7f99939be785 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Refreshing instance network info cache due to event network-changed-7a0be842-edfe-48ff-9275-dbb260c7e781. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1723.309528] env[63241]: DEBUG oslo_concurrency.lockutils [req-5695bc80-cf66-4781-ab25-3ad828e39cd0 req-31342de8-a4af-4d46-8a65-7f99939be785 service nova] Acquiring lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.310212] env[63241]: DEBUG oslo_concurrency.lockutils [req-5695bc80-cf66-4781-ab25-3ad828e39cd0 req-31342de8-a4af-4d46-8a65-7f99939be785 service nova] Acquired lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.310569] env[63241]: DEBUG nova.network.neutron [req-5695bc80-cf66-4781-ab25-3ad828e39cd0 req-31342de8-a4af-4d46-8a65-7f99939be785 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Refreshing network info cache for port 7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1723.376031] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e753da08-d4a5-4f17-85c8-154e843798c9] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1723.426349] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ea0a10-437c-43a9-a348-e81ebbe71aec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.435990] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6231ed0-d835-447c-8c90-15662fb7c880 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.478942] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732c05a6-5c96-46a0-9bf7-99b71d7404ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.489835] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb6b2c8-5660-4cd6-b895-98ef28a75ac4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.506097] env[63241]: DEBUG nova.compute.provider_tree [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.594008] env[63241]: DEBUG nova.objects.base [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Object Instance<9d301157-6870-4452-9ae6-0d45c4338886> lazy-loaded attributes: flavor,pci_requests {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1723.594253] env[63241]: DEBUG nova.network.neutron [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] allocate_for_instance() {{(pid=63241) allocate_for_instance 
/opt/stack/nova/nova/network/neutron.py:1205}} [ 1723.730536] env[63241]: DEBUG oslo_concurrency.lockutils [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.747942] env[63241]: DEBUG nova.policy [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54dc853b6f204a75ae7612f9fbd2d1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecafb0abbdc74501b22b20b797c4c60c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1723.762681] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820789, 'name': PowerOffVM_Task, 'duration_secs': 0.260758} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.762961] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1723.763679] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:28:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='6d3f31be-51b1-4783-a8b9-92005f2fb457',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-13492411',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1723.763968] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1723.764059] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1723.764252] env[63241]: 
DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1723.764699] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1723.764699] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1723.764815] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1723.764934] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1723.765098] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1723.765245] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1723.765420] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1723.770870] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1634976-c8de-4215-bb46-e477c0179ff2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.786379] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1723.786379] env[63241]: value = "task-1820790" [ 1723.786379] env[63241]: _type = "Task" [ 1723.786379] env[63241]: } to complete. 
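The nova.virt.hardware entries above step from "Build topologies for 1 vcpu(s) 1:1:1" to a single possible VirtCPUTopology(cores=1,sockets=1,threads=1): the driver enumerates (sockets, cores, threads) combinations that cover the flavor's vCPU count within the per-dimension maxima (65536 each here). The sketch below is a simplified illustration of that enumeration, not Nova's _get_possible_cpu_topologies code, and uses small maxima so the brute-force product stays cheap.

from itertools import product
from typing import NamedTuple

class CPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Keep every (sockets, cores, threads) triple whose product exactly fits vcpus.
    found = []
    for s, c, t in product(range(1, max_sockets + 1),
                           range(1, max_cores + 1),
                           range(1, max_threads + 1)):
        if s * c * t == vcpus:
            found.append(CPUTopology(s, c, t))
    return found

if __name__ == "__main__":
    # With 1 vCPU there is exactly one option, matching the log:
    # [VirtCPUTopology(cores=1,sockets=1,threads=1)]
    print(possible_topologies(1, max_sockets=8, max_cores=8, max_threads=2))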
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.795086] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820790, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.878151] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 2d1425f2-ddf9-4e82-bcfe-e11c597d011a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1724.010689] env[63241]: DEBUG nova.scheduler.client.report [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1724.176553] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "73ea6bff-60da-4691-a569-f4e9ae92f701" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.181028] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "73ea6bff-60da-4691-a569-f4e9ae92f701" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.181028] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "73ea6bff-60da-4691-a569-f4e9ae92f701-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.181028] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "73ea6bff-60da-4691-a569-f4e9ae92f701-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.181028] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 
tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "73ea6bff-60da-4691-a569-f4e9ae92f701-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.181028] env[63241]: INFO nova.compute.manager [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Terminating instance [ 1724.183376] env[63241]: DEBUG nova.compute.manager [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1724.183770] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1724.184936] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c582ef9-74ff-4b37-a8ef-0725052da72d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.195136] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1724.195136] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52185fce-468f-4d72-91f3-d1e009ad319f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.218135] env[63241]: DEBUG oslo_vmware.api [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1724.218135] env[63241]: value = "task-1820791" [ 1724.218135] env[63241]: _type = "Task" [ 1724.218135] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.220302] env[63241]: DEBUG oslo_vmware.api [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820791, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.298143] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820790, 'name': ReconfigVM_Task, 'duration_secs': 0.267729} completed successfully. 
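The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines above come from oslo.concurrency's lock wrapper; Nova takes a per-instance lock and an "<uuid>-events" lock while terminating, and a shared "compute_resources" lock around resource-tracker updates. A minimal sketch of the same pattern follows, assuming oslo.concurrency is installed and using a plain in-process lock (Nova's own helpers add prefixes and optional external file locks).

from oslo_concurrency import lockutils

def update_usage(instance_uuid, tracker):
    # Serialize resource-tracker style updates: waiters block until the
    # current holder releases the named "compute_resources" lock.
    with lockutils.lock("compute_resources"):
        tracker[instance_uuid] = tracker.get(instance_uuid, 0) + 1

if __name__ == "__main__":
    usage = {}
    update_usage("864175e0-33f0-429f-bdf6-722d9b00da2b", usage)
    print(usage)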
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.298143] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd79f4ff-d5bb-4d9d-a2ed-8da3f14c5c21 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.302480] env[63241]: DEBUG nova.objects.instance [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lazy-loading 'flavor' on Instance uuid e3842404-2c80-4fa9-b0c9-c58c484845a2 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1724.322604] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:28:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='6d3f31be-51b1-4783-a8b9-92005f2fb457',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-13492411',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1724.322857] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1724.323039] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1724.324512] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1724.324512] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1724.324512] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1724.324512] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1724.324512] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1724.324512] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1724.324904] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1724.324904] env[63241]: DEBUG nova.virt.hardware [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1724.326350] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bd1e14e-918f-4e0f-aea0-1d692097fb83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.333093] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1724.333093] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523692b0-364c-897c-3be4-3ed12088ce05" [ 1724.333093] env[63241]: _type = "Task" [ 1724.333093] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.344027] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523692b0-364c-897c-3be4-3ed12088ce05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.381646] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e3df56a7-eb82-4297-8aa3-f77c0380b6ec] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1724.518858] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.519449] env[63241]: DEBUG nova.compute.manager [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1724.522719] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.191s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.523037] env[63241]: DEBUG nova.objects.instance [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'resources' on Instance uuid 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1724.583486] env[63241]: DEBUG nova.network.neutron [req-5695bc80-cf66-4781-ab25-3ad828e39cd0 req-31342de8-a4af-4d46-8a65-7f99939be785 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updated VIF entry in instance network info cache for port 7a0be842-edfe-48ff-9275-dbb260c7e781. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1724.583956] env[63241]: DEBUG nova.network.neutron [req-5695bc80-cf66-4781-ab25-3ad828e39cd0 req-31342de8-a4af-4d46-8a65-7f99939be785 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating instance_info_cache with network_info: [{"id": "7a0be842-edfe-48ff-9275-dbb260c7e781", "address": "fa:16:3e:aa:cc:cf", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0be842-ed", "ovs_interfaceid": "7a0be842-edfe-48ff-9275-dbb260c7e781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.665745] env[63241]: DEBUG nova.network.neutron [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Successfully created port: ceb9556a-7bff-425f-bfd2-a6dba68ba438 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1724.721318] env[63241]: DEBUG oslo_vmware.api [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820791, 'name': PowerOffVM_Task, 'duration_secs': 0.222167} completed successfully. 
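The "Updating instance_info_cache with network_info: [...]" entries above show the per-instance VIF cache: a list of port dicts, each carrying an id, a MAC address, and a nested network/subnets/ips tree. The helper below is only a reading aid for that structure, assuming the key layout visible in the log excerpt; it is not a Nova or Neutron API.

def summarize_network_info(network_info):
    # Flatten a network_info cache entry into (port id, MAC, fixed IPs),
    # assuming each VIF has "id", "address" and "network" -> "subnets" -> "ips".
    rows = []
    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif.get("network", {}).get("subnets", [])
                     for ip in subnet.get("ips", [])]
        rows.append((vif["id"], vif["address"], fixed_ips))
    return rows

if __name__ == "__main__":
    # Trimmed-down version of the cache entry for port 7a0be842-... above.
    cache = [{
        "id": "7a0be842-edfe-48ff-9275-dbb260c7e781",
        "address": "fa:16:3e:aa:cc:cf",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.12"}]}]},
    }]
    print(summarize_network_info(cache))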
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.721813] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1724.722120] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1724.722473] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6cca24d7-e2e8-4cdb-8da5-10a5f4a84b7d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.811952] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.812538] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquired lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.846138] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523692b0-364c-897c-3be4-3ed12088ce05, 'name': SearchDatastore_Task, 'duration_secs': 0.011192} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.852576] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfiguring VM instance instance-00000048 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1724.852860] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2596631c-660e-46da-be2e-007c1af8fb92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.876709] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1724.876709] env[63241]: value = "task-1820793" [ 1724.876709] env[63241]: _type = "Task" [ 1724.876709] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.885457] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 5203c12e-14a0-4736-8185-8ead9a29b03b] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1724.887915] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820793, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.033111] env[63241]: DEBUG nova.compute.utils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1725.034617] env[63241]: DEBUG nova.compute.manager [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1725.034789] env[63241]: DEBUG nova.network.neutron [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1725.086855] env[63241]: DEBUG oslo_concurrency.lockutils [req-5695bc80-cf66-4781-ab25-3ad828e39cd0 req-31342de8-a4af-4d46-8a65-7f99939be785 service nova] Releasing lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.191393] env[63241]: DEBUG nova.policy [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa5224c96c3545269f4f45be620a7cdf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98582d7ee18145318ee5a05cac36781e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1725.387731] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820793, 'name': ReconfigVM_Task, 'duration_secs': 0.341529} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.388398] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0b7c72e0-79b9-4435-9676-7a0e9afaf936] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1725.390918] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfigured VM instance instance-00000048 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1725.395232] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1ee235-cb1e-4652-af92-91cf8d81f926 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.422301] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 7f1710d0-857d-41fc-8151-8c5e129dda08/7f1710d0-857d-41fc-8151-8c5e129dda08.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1725.423625] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-654c8b31-d0e9-448f-8e5e-88459b9b9986 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.439446] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3c5ce4-5f99-487c-9113-db8c23237b2a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.449128] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc143377-9154-4bab-a675-b4f6ab3da11d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.457792] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1725.457792] env[63241]: value = "task-1820794" [ 1725.457792] env[63241]: _type = "Task" [ 1725.457792] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.488217] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f761a522-b83b-494d-ac8b-e0d700c6c956 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.493881] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820794, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.500502] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c030ba4-681c-4c81-a329-a199de15ad99 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.513315] env[63241]: DEBUG nova.compute.provider_tree [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1725.540854] env[63241]: DEBUG nova.compute.manager [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1725.596463] env[63241]: DEBUG nova.network.neutron [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1725.895440] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: efbe39fa-d581-41ac-b51c-9c94c9839d7a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1725.971308] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820794, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.017139] env[63241]: DEBUG nova.scheduler.client.report [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1726.062381] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1726.062622] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1726.062798] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Deleting the datastore file [datastore1] 73ea6bff-60da-4691-a569-f4e9ae92f701 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1726.063092] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bd67752-0049-4d46-b013-d4b3eed85cb6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.072665] env[63241]: DEBUG oslo_vmware.api [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1726.072665] env[63241]: value = "task-1820795" [ 1726.072665] env[63241]: _type = "Task" [ 1726.072665] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.084989] env[63241]: DEBUG oslo_vmware.api [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820795, 'name': DeleteDatastoreFile_Task} progress is 0%. 
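The scheduler report entries above repeat the inventory record for provider 9a5e30eb-... (VCPU, MEMORY_MB, DISK_GB, each with total, reserved, allocation_ratio and unit bounds). To a first approximation, the usable capacity derived from such a record is (total - reserved) * allocation_ratio; the sketch below just evaluates that for the numbers in the log and is not the Placement service's code.

def usable_capacity(inventory):
    # Rough per-resource-class capacity: (total - reserved) * allocation_ratio.
    return {
        rc: (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        for rc, inv in inventory.items()
    }

if __name__ == "__main__":
    # Inventory data as reported for provider 9a5e30eb-... in the log.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    print(usable_capacity(inventory))  # 192.0 VCPU, 196078.0 MB, 400.0 GB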
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.220856] env[63241]: DEBUG nova.network.neutron [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Successfully created port: be5471bd-3bc7-4ef4-9ea6-be69b0420644 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1726.399183] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: bef91c1c-a418-4464-ae7b-883ffb7e9695] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1726.470506] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820794, 'name': ReconfigVM_Task, 'duration_secs': 0.709037} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.470890] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 7f1710d0-857d-41fc-8151-8c5e129dda08/7f1710d0-857d-41fc-8151-8c5e129dda08.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1726.472132] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f440dd8-55a7-4570-bdca-64eea584146e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.499015] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210ab7f0-1fc6-4005-aed0-f58f18ef8fde {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.524231] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.526320] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.180s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.526805] env[63241]: DEBUG nova.objects.instance [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lazy-loading 'resources' on Instance uuid 381bba62-49a7-4d6f-b12a-741f5d884fe5 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1726.528181] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a980dcf0-1caf-400a-9819-430788db1c1f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.549586] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20109b4-f2a9-48eb-b275-b78d22b34a32 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.553765] env[63241]: DEBUG nova.compute.manager [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1726.561967] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1726.562464] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2777bd49-bbfc-4340-a1b1-e04f98cb6a7e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.565346] env[63241]: INFO nova.scheduler.client.report [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted allocations for instance 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490 [ 1726.571982] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1726.571982] env[63241]: value = "task-1820796" [ 1726.571982] env[63241]: _type = "Task" [ 1726.571982] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.585270] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820796, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.590755] env[63241]: DEBUG oslo_vmware.api [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181805} completed successfully. 
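The recurring "Instance has had 0 of 5 cleanup attempts" lines come from the periodic _run_pending_deletes task, which retries deferred cleanup work for deleted instances and gives up once a fixed attempt budget is spent. A minimal sketch of that bookkeeping under stated assumptions: the function name, the in-memory counters, and the give-up behaviour are hypothetical stand-ins for Nova's database-backed state, with the same budget of 5 as in the log.

MAX_ATTEMPTS = 5  # matches the "0 of 5 cleanup attempts" budget in the log

def run_pending_deletes(pending, cleanup_fn, attempts):
    # Retry cleanup for each pending instance UUID, skipping any that has
    # already used up its attempt budget; return the ones to retry next run.
    still_pending = []
    for uuid in pending:
        used = attempts.get(uuid, 0)
        print(f"Instance {uuid} has had {used} of {MAX_ATTEMPTS} cleanup attempts")
        if used >= MAX_ATTEMPTS:
            continue  # budget exhausted; leave it for manual cleanup
        attempts[uuid] = used + 1
        if not cleanup_fn(uuid):
            still_pending.append(uuid)  # try again on the next periodic run
    return still_pending

if __name__ == "__main__":
    counters = {}
    leftover = run_pending_deletes(["a534b054-2143-41c4-a0fa-028339ecdbbf"],
                                   cleanup_fn=lambda _uuid: False,
                                   attempts=counters)
    print(leftover, counters)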
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.593180] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1726.593445] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1726.593865] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1726.593865] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1726.594226] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1726.594464] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1726.594703] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1726.594871] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1726.595113] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1726.595313] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1726.595557] env[63241]: DEBUG nova.virt.hardware [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1726.595943] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1726.596144] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1726.596367] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1726.596618] env[63241]: INFO nova.compute.manager [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Took 2.41 seconds to destroy the instance on the hypervisor. [ 1726.596873] env[63241]: DEBUG oslo.service.loopingcall [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1726.598480] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35b1017-b5e5-4238-9f29-c62fe90d6500 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.601789] env[63241]: DEBUG nova.compute.manager [-] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1726.601789] env[63241]: DEBUG nova.network.neutron [-] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1726.611048] env[63241]: DEBUG nova.network.neutron [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updating instance_info_cache with network_info: [{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.613331] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f23349-052e-4ab9-8dff-a33ba20e41fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.891570] env[63241]: DEBUG nova.network.neutron [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Successfully updated port: ceb9556a-7bff-425f-bfd2-a6dba68ba438 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1726.905442] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 81854e13-e0c1-43a9-8529-678d56d57bbf] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1727.075159] env[63241]: 
DEBUG oslo_concurrency.lockutils [None req-73d41565-f072-43b4-8241-0cb8cfcd6a69 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "44ddb1f0-fd5c-4c9e-baf2-eec09d80f490" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.465s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.092340] env[63241]: DEBUG oslo_vmware.api [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820796, 'name': PowerOnVM_Task, 'duration_secs': 0.44385} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.092340] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1727.125255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Releasing lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.125255] env[63241]: DEBUG nova.compute.manager [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Inject network info {{(pid=63241) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1727.125255] env[63241]: DEBUG nova.compute.manager [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] network_info to inject: |[{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}]| {{(pid=63241) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1727.128364] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Reconfiguring VM instance to set the machine id {{(pid=63241) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1727.128974] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0127d158-0ebc-46b2-b83a-299fc62bbd5a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.152188] env[63241]: DEBUG oslo_vmware.api [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1727.152188] env[63241]: value = "task-1820797" [ 1727.152188] env[63241]: _type = "Task" [ 1727.152188] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.167171] env[63241]: DEBUG oslo_vmware.api [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820797, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.343761] env[63241]: DEBUG nova.compute.manager [req-1aa9819e-b831-41c6-95c4-df68e7a9e7df req-c60ba49e-7907-4911-9c60-095e58118bc7 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Received event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1727.343999] env[63241]: DEBUG nova.compute.manager [req-1aa9819e-b831-41c6-95c4-df68e7a9e7df req-c60ba49e-7907-4911-9c60-095e58118bc7 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing instance network info cache due to event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1727.344305] env[63241]: DEBUG oslo_concurrency.lockutils [req-1aa9819e-b831-41c6-95c4-df68e7a9e7df req-c60ba49e-7907-4911-9c60-095e58118bc7 service nova] Acquiring lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.344485] env[63241]: DEBUG oslo_concurrency.lockutils [req-1aa9819e-b831-41c6-95c4-df68e7a9e7df req-c60ba49e-7907-4911-9c60-095e58118bc7 service nova] Acquired lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.344681] env[63241]: DEBUG nova.network.neutron [req-1aa9819e-b831-41c6-95c4-df68e7a9e7df req-c60ba49e-7907-4911-9c60-095e58118bc7 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1727.399229] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.399229] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.399229] env[63241]: DEBUG nova.network.neutron [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1727.405931] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8cfa7e-8748-482b-9273-9f471fa2852f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.411877] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: c1c85cc0-53f1-4920-8f3e-6dd69414fa85] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1727.418433] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14afcf73-31e6-4c54-b0a3-6e52bf8c5d9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.456329] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeac6bae-95fd-4380-acb1-5500e23a6310 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.465431] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1578b3d5-bc64-47bf-a820-4060ce50530f {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.484141] env[63241]: DEBUG nova.compute.provider_tree [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1727.567689] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "b7378019-a572-4d4d-a82d-cee13a1b6a88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.568333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "b7378019-a572-4d4d-a82d-cee13a1b6a88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.608257] env[63241]: DEBUG nova.network.neutron [-] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.670286] env[63241]: DEBUG oslo_vmware.api [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820797, 'name': ReconfigVM_Task, 'duration_secs': 0.172976} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.670591] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b528707f-c2b8-4133-9bc1-c8ae8def861a tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Reconfigured VM instance to set the machine id {{(pid=63241) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1727.914575] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 343a7e90-5e55-4125-8475-44050f267987] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1727.961507] env[63241]: WARNING nova.network.neutron [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] cafd3d43-975a-4836-8948-2f1b47e56666 already exists in list: networks containing: ['cafd3d43-975a-4836-8948-2f1b47e56666']. 
ignoring it [ 1727.987587] env[63241]: DEBUG nova.scheduler.client.report [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1728.042529] env[63241]: DEBUG nova.compute.manager [req-3d059ab2-78bc-467b-8428-896603bd986a req-6a98cbc7-a57e-413b-84bc-ed7fe96d2087 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Received event network-changed-98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1728.042982] env[63241]: DEBUG nova.compute.manager [req-3d059ab2-78bc-467b-8428-896603bd986a req-6a98cbc7-a57e-413b-84bc-ed7fe96d2087 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Refreshing instance network info cache due to event network-changed-98619b24-0318-422e-90bb-ed8db3309905. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1728.042982] env[63241]: DEBUG oslo_concurrency.lockutils [req-3d059ab2-78bc-467b-8428-896603bd986a req-6a98cbc7-a57e-413b-84bc-ed7fe96d2087 service nova] Acquiring lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1728.043113] env[63241]: DEBUG oslo_concurrency.lockutils [req-3d059ab2-78bc-467b-8428-896603bd986a req-6a98cbc7-a57e-413b-84bc-ed7fe96d2087 service nova] Acquired lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1728.043452] env[63241]: DEBUG nova.network.neutron [req-3d059ab2-78bc-467b-8428-896603bd986a req-6a98cbc7-a57e-413b-84bc-ed7fe96d2087 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Refreshing network info cache for port 98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1728.072013] env[63241]: DEBUG nova.compute.manager [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1728.108686] env[63241]: INFO nova.compute.manager [None req-0aa18b75-c837-413a-a655-8e125cdd9d4d tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance to original state: 'active' [ 1728.111732] env[63241]: INFO nova.compute.manager [-] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Took 1.51 seconds to deallocate network for instance. 
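
The resource-tracker report above logs the full inventory payload for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. As a minimal illustrative sketch (not Nova's own code), the Python below derives the effective schedulable capacity of each resource class from that payload, assuming the standard placement formula capacity = (total - reserved) * allocation_ratio; the dict literal is copied from the log entry above.

    # Illustrative sketch only: derive effective capacity from the inventory
    # payload shown in the log entry above. Assumes the usual placement
    # formula capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Effective capacity the scheduler can allocate against.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g} (max per single allocation: {inv['max_unit']})")

Under that formula the logged values work out to 192 VCPU, 196078 MB of RAM and 400 GB of disk of schedulable capacity, which is consistent with the "Inventory has not changed" messages repeated throughout this run.
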
[ 1728.394243] env[63241]: DEBUG nova.network.neutron [req-1aa9819e-b831-41c6-95c4-df68e7a9e7df req-c60ba49e-7907-4911-9c60-095e58118bc7 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updated VIF entry in instance network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1728.394570] env[63241]: DEBUG nova.network.neutron [req-1aa9819e-b831-41c6-95c4-df68e7a9e7df req-c60ba49e-7907-4911-9c60-095e58118bc7 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updating instance_info_cache with network_info: [{"id": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "address": "fa:16:3e:d2:d7:ea", "network": {"id": "d35148d1-0dba-4e39-9bc1-ad561f93e9f8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1579993511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e7a8a40a56ee42dca4190ac78e5f22ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac5cae6-11", "ovs_interfaceid": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.419129] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 78894fda-8309-430a-ab38-ce1a415d83d3] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1728.493955] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.967s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.500359] env[63241]: DEBUG oslo_concurrency.lockutils [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.612s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.500359] env[63241]: DEBUG nova.objects.instance [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lazy-loading 'resources' on Instance uuid cb7eb689-b8f6-479d-aa6b-c27fab16e131 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1728.534122] env[63241]: INFO nova.scheduler.client.report [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Deleted allocations for instance 381bba62-49a7-4d6f-b12a-741f5d884fe5 [ 1728.550991] env[63241]: DEBUG nova.network.neutron [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ceb9556a-7bff-425f-bfd2-a6dba68ba438", "address": "fa:16:3e:f1:59:43", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceb9556a-7b", "ovs_interfaceid": "ceb9556a-7bff-425f-bfd2-a6dba68ba438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.609350] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.623862] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 
tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.897491] env[63241]: DEBUG oslo_concurrency.lockutils [req-1aa9819e-b831-41c6-95c4-df68e7a9e7df req-c60ba49e-7907-4911-9c60-095e58118bc7 service nova] Releasing lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.926765] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 2b1805b3-2e03-410f-8222-64b8542d4a43] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1728.958872] env[63241]: DEBUG nova.network.neutron [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Successfully updated port: be5471bd-3bc7-4ef4-9ea6-be69b0420644 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1729.045769] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c866f0e6-8738-4063-a80f-6aeab53328c9 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "381bba62-49a7-4d6f-b12a-741f5d884fe5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.529s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.054962] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.056686] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.056925] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.057959] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b92896-86a1-4d03-9f6a-f66707cd6cdd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.080360] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1729.080610] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1729.080768] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1729.080954] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1729.081115] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1729.081264] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1729.081470] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1729.081628] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1729.081814] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1729.081951] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1729.082144] env[63241]: DEBUG nova.virt.hardware [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1729.089143] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Reconfiguring VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1729.093130] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-292340af-3173-470a-ba5f-775145b464a2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.113781] env[63241]: DEBUG oslo_vmware.api [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1729.113781] env[63241]: value = "task-1820798" [ 1729.113781] env[63241]: _type = "Task" [ 1729.113781] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.125196] env[63241]: DEBUG oslo_vmware.api [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820798, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.223007] env[63241]: DEBUG nova.network.neutron [req-3d059ab2-78bc-467b-8428-896603bd986a req-6a98cbc7-a57e-413b-84bc-ed7fe96d2087 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updated VIF entry in instance network info cache for port 98619b24-0318-422e-90bb-ed8db3309905. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1729.224070] env[63241]: DEBUG nova.network.neutron [req-3d059ab2-78bc-467b-8428-896603bd986a req-6a98cbc7-a57e-413b-84bc-ed7fe96d2087 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updating instance_info_cache with network_info: [{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1729.419102] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a85856-9468-46c9-8048-ce4ac7db7d82 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.429799] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: c390d1ca-a199-4df6-847a-b543630a7bf5] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1729.434394] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390006da-7d80-42df-b81f-84c875541d29 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.475570] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.475901] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.476088] env[63241]: DEBUG nova.network.neutron [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 
tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1729.479333] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3beeb1-d3b4-4889-be24-25afaa5a201f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.488610] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1061d756-f981-461e-b063-7ad995a1053c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.505842] env[63241]: DEBUG nova.compute.provider_tree [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1729.626640] env[63241]: DEBUG oslo_vmware.api [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.726905] env[63241]: DEBUG oslo_concurrency.lockutils [req-3d059ab2-78bc-467b-8428-896603bd986a req-6a98cbc7-a57e-413b-84bc-ed7fe96d2087 service nova] Releasing lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.935021] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e2758650-2762-49f6-a678-f55425a89994] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1730.011021] env[63241]: DEBUG nova.scheduler.client.report [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1730.013630] env[63241]: DEBUG nova.network.neutron [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1730.125376] env[63241]: DEBUG oslo_vmware.api [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.217330] env[63241]: DEBUG nova.network.neutron [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance_info_cache with network_info: [{"id": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "address": "fa:16:3e:98:e9:3c", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5471bd-3b", "ovs_interfaceid": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.434962] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 27177719-5090-43de-9bca-6db6bebab7b4] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1730.516922] env[63241]: DEBUG oslo_concurrency.lockutils [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.018s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.519236] env[63241]: DEBUG oslo_concurrency.lockutils [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.789s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.519474] env[63241]: DEBUG nova.objects.instance [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lazy-loading 'resources' on Instance uuid 31998a62-70f5-4205-89b9-df8312916126 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
1730.543961] env[63241]: INFO nova.scheduler.client.report [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted allocations for instance cb7eb689-b8f6-479d-aa6b-c27fab16e131 [ 1730.625850] env[63241]: DEBUG oslo_vmware.api [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.720587] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.720943] env[63241]: DEBUG nova.compute.manager [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Instance network_info: |[{"id": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "address": "fa:16:3e:98:e9:3c", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5471bd-3b", "ovs_interfaceid": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1730.721436] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:e9:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be5471bd-3bc7-4ef4-9ea6-be69b0420644', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1730.730691] env[63241]: DEBUG oslo.service.loopingcall [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1730.731667] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1730.731977] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92fbb0b3-2287-4214-a19f-19ce64bf396c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.755925] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1730.755925] env[63241]: value = "task-1820799" [ 1730.755925] env[63241]: _type = "Task" [ 1730.755925] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.765069] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820799, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.938151] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: f1c19f17-ce7c-481a-99fd-d0bb20f1520b] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1731.059220] env[63241]: DEBUG oslo_concurrency.lockutils [None req-aeda31a4-aef4-4f89-bc27-8eb86d01addb tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "cb7eb689-b8f6-479d-aa6b-c27fab16e131" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.748s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.129144] env[63241]: DEBUG oslo_vmware.api [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820798, 'name': ReconfigVM_Task, 'duration_secs': 1.993847} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.130033] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.130033] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Reconfigured VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1731.274056] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820799, 'name': CreateVM_Task, 'duration_secs': 0.431665} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.274249] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1731.275366] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.275366] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.275655] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1731.275808] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-060b00c1-e3fa-42fd-9838-d6b0e31ea3da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.282553] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1731.282553] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52297ca8-0874-baad-f129-38b179550585" [ 1731.282553] env[63241]: _type = "Task" [ 1731.282553] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.287029] env[63241]: DEBUG nova.compute.manager [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Received event network-vif-deleted-328d662e-d2e8-4f8a-94b4-dacebf42accf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1731.287029] env[63241]: DEBUG nova.compute.manager [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Received event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1731.287029] env[63241]: DEBUG nova.compute.manager [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing instance network info cache due to event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1731.287029] env[63241]: DEBUG oslo_concurrency.lockutils [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] Acquiring lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.287029] env[63241]: DEBUG oslo_concurrency.lockutils [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] Acquired lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.287029] env[63241]: DEBUG nova.network.neutron [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1731.314393] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52297ca8-0874-baad-f129-38b179550585, 'name': SearchDatastore_Task, 'duration_secs': 0.012424} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.318048] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.318325] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1731.318537] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.318686] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.318905] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1731.320216] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8d30854-0cb7-4c10-b16b-9cfa1ff5b9f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.331374] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1731.331442] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1731.335465] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88f1b074-4d56-4488-9e07-2ad043fa884d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.344253] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1731.344253] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b521c7-b782-0098-4994-b3b095e589a0" [ 1731.344253] env[63241]: _type = "Task" [ 1731.344253] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.355288] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b521c7-b782-0098-4994-b3b095e589a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.411314] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.411651] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.425558] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c3c1b1-de97-49ac-bd6c-795e056c9636 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.434522] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc927ff7-ea5d-4beb-be55-f8e156b1852a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.440991] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 40217405-dcba-48cf-9d92-4122390d9fa8] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1731.473492] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aec77d-ddc1-4c4b-b478-8d3b2dc3007f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.480444] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c2abd7-1991-41db-82f4-ac8412220cf4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.494441] env[63241]: DEBUG nova.compute.provider_tree [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1731.572054] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.572334] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.572547] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.574350] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.574350] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.575446] env[63241]: INFO nova.compute.manager [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Terminating instance [ 1731.577344] env[63241]: DEBUG nova.compute.manager [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1731.577693] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1731.578514] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8854060-7722-4eaa-a4a0-9110d750d91b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.587470] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1731.588350] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1ed8fa3-8d37-44a9-8162-11dbc3fcbd82 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.595231] env[63241]: DEBUG oslo_vmware.api [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1731.595231] env[63241]: value = "task-1820800" [ 1731.595231] env[63241]: _type = "Task" [ 1731.595231] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.606698] env[63241]: DEBUG oslo_vmware.api [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.636106] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5ce524ec-469e-4dec-b454-84fefc55f14e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 9.294s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.856286] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b521c7-b782-0098-4994-b3b095e589a0, 'name': SearchDatastore_Task, 'duration_secs': 0.00993} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.856964] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72e96044-6e09-4e7d-85a1-21c78cb79b3f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.866027] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1731.866027] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5245d07a-418c-4173-4a0e-8d124be1a5e3" [ 1731.866027] env[63241]: _type = "Task" [ 1731.866027] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.875158] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5245d07a-418c-4173-4a0e-8d124be1a5e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.915854] env[63241]: DEBUG nova.compute.manager [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1731.971723] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 5060e745-08d0-429e-8780-bfdad7a29f30] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1731.999919] env[63241]: DEBUG nova.scheduler.client.report [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1732.107298] env[63241]: DEBUG oslo_vmware.api [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820800, 'name': PowerOffVM_Task, 'duration_secs': 0.273325} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.107735] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1732.107976] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1732.108324] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfb70404-cf8d-47f3-b346-c2cc7afd9fab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.194911] env[63241]: DEBUG nova.network.neutron [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updated VIF entry in instance network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1732.195317] env[63241]: DEBUG nova.network.neutron [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updating instance_info_cache with network_info: [{"id": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "address": "fa:16:3e:d2:d7:ea", "network": {"id": "d35148d1-0dba-4e39-9bc1-ad561f93e9f8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1579993511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e7a8a40a56ee42dca4190ac78e5f22ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac5cae6-11", "ovs_interfaceid": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.277907] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1732.278185] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] 
[instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1732.278382] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleting the datastore file [datastore1] f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1732.278650] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96a9a6ab-12ee-4c5e-a05b-674a4ccca376 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.290110] env[63241]: DEBUG oslo_vmware.api [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1732.290110] env[63241]: value = "task-1820802" [ 1732.290110] env[63241]: _type = "Task" [ 1732.290110] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.300106] env[63241]: DEBUG oslo_vmware.api [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.378260] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5245d07a-418c-4173-4a0e-8d124be1a5e3, 'name': SearchDatastore_Task, 'duration_secs': 0.03342} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.378421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.378697] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 864175e0-33f0-429f-bdf6-722d9b00da2b/864175e0-33f0-429f-bdf6-722d9b00da2b.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1732.378966] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-652fc3e4-b52f-4333-b0c6-985c8c3220ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.386229] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1732.386229] env[63241]: value = "task-1820803" [ 1732.386229] env[63241]: _type = "Task" [ 1732.386229] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.394220] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820803, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.441814] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.475263] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: ac35fa03-aeca-4e18-84ab-cb80bb4cabfd] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1732.504775] env[63241]: DEBUG oslo_concurrency.lockutils [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.985s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.508080] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.898s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.510364] env[63241]: INFO nova.compute.claims [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1732.518404] env[63241]: DEBUG nova.compute.manager [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-vif-plugged-ceb9556a-7bff-425f-bfd2-a6dba68ba438 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1732.518695] env[63241]: DEBUG oslo_concurrency.lockutils [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.518868] env[63241]: DEBUG oslo_concurrency.lockutils [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] Lock "9d301157-6870-4452-9ae6-0d45c4338886-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.519033] env[63241]: DEBUG oslo_concurrency.lockutils [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] Lock "9d301157-6870-4452-9ae6-0d45c4338886-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.519258] env[63241]: DEBUG nova.compute.manager [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] No waiting events found dispatching network-vif-plugged-ceb9556a-7bff-425f-bfd2-a6dba68ba438 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1732.519411] env[63241]: WARNING nova.compute.manager [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received unexpected event network-vif-plugged-ceb9556a-7bff-425f-bfd2-a6dba68ba438 for instance with vm_state active and task_state None. [ 1732.519643] env[63241]: DEBUG nova.compute.manager [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-changed-ceb9556a-7bff-425f-bfd2-a6dba68ba438 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1732.519821] env[63241]: DEBUG nova.compute.manager [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Refreshing instance network info cache due to event network-changed-ceb9556a-7bff-425f-bfd2-a6dba68ba438. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1732.520092] env[63241]: DEBUG oslo_concurrency.lockutils [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] Acquiring lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.520232] env[63241]: DEBUG oslo_concurrency.lockutils [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] Acquired lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.520859] env[63241]: DEBUG nova.network.neutron [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Refreshing network info cache for port ceb9556a-7bff-425f-bfd2-a6dba68ba438 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1732.526018] env[63241]: INFO nova.scheduler.client.report [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleted allocations for instance 31998a62-70f5-4205-89b9-df8312916126 [ 1732.554262] env[63241]: DEBUG nova.objects.instance [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lazy-loading 'flavor' on Instance uuid e3842404-2c80-4fa9-b0c9-c58c484845a2 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1732.698622] env[63241]: DEBUG oslo_concurrency.lockutils [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] Releasing lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.698919] 
env[63241]: DEBUG nova.compute.manager [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Received event network-vif-plugged-be5471bd-3bc7-4ef4-9ea6-be69b0420644 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1732.699153] env[63241]: DEBUG oslo_concurrency.lockutils [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] Acquiring lock "864175e0-33f0-429f-bdf6-722d9b00da2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.699379] env[63241]: DEBUG oslo_concurrency.lockutils [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.699556] env[63241]: DEBUG oslo_concurrency.lockutils [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.699745] env[63241]: DEBUG nova.compute.manager [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] No waiting events found dispatching network-vif-plugged-be5471bd-3bc7-4ef4-9ea6-be69b0420644 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1732.699923] env[63241]: WARNING nova.compute.manager [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Received unexpected event network-vif-plugged-be5471bd-3bc7-4ef4-9ea6-be69b0420644 for instance with vm_state building and task_state spawning. [ 1732.700107] env[63241]: DEBUG nova.compute.manager [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Received event network-changed-be5471bd-3bc7-4ef4-9ea6-be69b0420644 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1732.700263] env[63241]: DEBUG nova.compute.manager [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Refreshing instance network info cache due to event network-changed-be5471bd-3bc7-4ef4-9ea6-be69b0420644. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1732.700460] env[63241]: DEBUG oslo_concurrency.lockutils [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] Acquiring lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.700597] env[63241]: DEBUG oslo_concurrency.lockutils [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] Acquired lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.701910] env[63241]: DEBUG nova.network.neutron [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Refreshing network info cache for port be5471bd-3bc7-4ef4-9ea6-be69b0420644 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1732.805339] env[63241]: DEBUG oslo_vmware.api [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389796} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.805502] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1732.805702] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1732.805977] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1732.806179] env[63241]: INFO nova.compute.manager [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1732.806834] env[63241]: DEBUG oslo.service.loopingcall [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1732.807390] env[63241]: DEBUG nova.compute.manager [-] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1732.807495] env[63241]: DEBUG nova.network.neutron [-] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1732.897851] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820803, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.979399] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: d60c3a22-19fb-4826-be88-d0307810a079] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1733.032139] env[63241]: DEBUG oslo_concurrency.lockutils [None req-44efa878-64df-46ea-9740-3b44b32c2893 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "31998a62-70f5-4205-89b9-df8312916126" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.097s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.059861] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.060051] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquired lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.407013] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.767794} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.408140] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 864175e0-33f0-429f-bdf6-722d9b00da2b/864175e0-33f0-429f-bdf6-722d9b00da2b.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1733.408424] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1733.408719] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33eaf2a3-6fea-4b19-aa3b-ece7b3959c25 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.416290] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1733.416290] env[63241]: value = "task-1820804" [ 1733.416290] env[63241]: _type = "Task" [ 1733.416290] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.424631] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820804, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.483928] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0115b03b-c828-4e8b-a4d2-c98f8ca69c66] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1733.545926] env[63241]: DEBUG nova.network.neutron [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updated VIF entry in instance network info cache for port ceb9556a-7bff-425f-bfd2-a6dba68ba438. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1733.546055] env[63241]: DEBUG nova.network.neutron [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ceb9556a-7bff-425f-bfd2-a6dba68ba438", "address": "fa:16:3e:f1:59:43", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceb9556a-7b", "ovs_interfaceid": "ceb9556a-7bff-425f-bfd2-a6dba68ba438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.550183] env[63241]: DEBUG nova.network.neutron [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updated VIF entry in instance network info cache for port be5471bd-3bc7-4ef4-9ea6-be69b0420644. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1733.550183] env[63241]: DEBUG nova.network.neutron [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance_info_cache with network_info: [{"id": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "address": "fa:16:3e:98:e9:3c", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5471bd-3b", "ovs_interfaceid": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.583106] env[63241]: DEBUG nova.network.neutron [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1733.791301] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128f7d1d-09df-4787-bf36-2c82dc3eefc9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.799422] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0098e39c-0873-4238-b45b-732e0f72897c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.838081] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac3f87d-cf43-4af8-bfa1-809ba1435b36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.844253] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.844253] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.852021] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6e742c-ec2a-4130-a2aa-aa60b042b9dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.865712] env[63241]: DEBUG nova.compute.provider_tree [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1733.896748] env[63241]: DEBUG nova.network.neutron [-] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.928209] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820804, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071435} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.928514] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1733.932018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1707312e-83d0-42d5-ad41-db795c390d1d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.952757] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 864175e0-33f0-429f-bdf6-722d9b00da2b/864175e0-33f0-429f-bdf6-722d9b00da2b.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1733.953059] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d13d48a4-56fb-45ef-81a6-1d7778c53cd7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.972947] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1733.972947] env[63241]: value = "task-1820805" [ 1733.972947] env[63241]: _type = "Task" [ 1733.972947] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.980982] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820805, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.989525] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 41182989-2537-42f0-8c37-792b8b2c5206] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1734.051933] env[63241]: DEBUG oslo_concurrency.lockutils [req-02912a1f-ad04-4c4e-9e7d-4e341c3bcc67 req-df24330a-94bc-4f2d-9a37-5f54ce128dac service nova] Releasing lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.055143] env[63241]: DEBUG oslo_concurrency.lockutils [req-dfe43802-696a-496b-a79c-6a405b223661 req-a4094ca1-71c7-4805-8f10-1bdcaffba242 service nova] Releasing lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.236336] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.237615] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.344885] env[63241]: DEBUG nova.compute.manager [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1734.368903] env[63241]: DEBUG nova.scheduler.client.report [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1734.399016] env[63241]: INFO nova.compute.manager [-] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Took 1.59 seconds to deallocate network for instance. [ 1734.474087] env[63241]: DEBUG nova.compute.manager [req-b79c5739-124f-4d8f-bf8e-3a106a8fc7bc req-48157d38-4a5b-4c9f-8b66-4a558fe341d9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Received event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1734.474299] env[63241]: DEBUG nova.compute.manager [req-b79c5739-124f-4d8f-bf8e-3a106a8fc7bc req-48157d38-4a5b-4c9f-8b66-4a558fe341d9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing instance network info cache due to event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1734.474930] env[63241]: DEBUG oslo_concurrency.lockutils [req-b79c5739-124f-4d8f-bf8e-3a106a8fc7bc req-48157d38-4a5b-4c9f-8b66-4a558fe341d9 service nova] Acquiring lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.478757] env[63241]: DEBUG oslo_concurrency.lockutils [req-b79c5739-124f-4d8f-bf8e-3a106a8fc7bc req-48157d38-4a5b-4c9f-8b66-4a558fe341d9 service nova] Acquired lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.478757] env[63241]: DEBUG nova.network.neutron [req-b79c5739-124f-4d8f-bf8e-3a106a8fc7bc req-48157d38-4a5b-4c9f-8b66-4a558fe341d9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1734.492310] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820805, 'name': ReconfigVM_Task, 'duration_secs': 0.442835} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.492654] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: fe8eaeee-56b2-4974-a448-8f95848b3b3a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1734.494431] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 864175e0-33f0-429f-bdf6-722d9b00da2b/864175e0-33f0-429f-bdf6-722d9b00da2b.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1734.495351] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-192641cd-fa68-4c51-a316-01c457e9a888 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.499193] env[63241]: DEBUG nova.network.neutron [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updating instance_info_cache with network_info: [{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.506257] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1734.506257] env[63241]: value = "task-1820806" [ 1734.506257] env[63241]: _type = "Task" [ 1734.506257] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.514905] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820806, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.687101] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "e28ba013-0bc5-4edc-858d-674980bc8742" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.687489] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.735435] env[63241]: DEBUG nova.compute.manager [req-e93ba84e-355e-4b16-93b2-fa3a30c2b1fe req-e5af1c0e-c4c1-4de2-adce-3f690e0e59a9 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Received event network-changed-98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1734.735624] env[63241]: DEBUG nova.compute.manager [req-e93ba84e-355e-4b16-93b2-fa3a30c2b1fe req-e5af1c0e-c4c1-4de2-adce-3f690e0e59a9 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Refreshing instance network info cache due to event network-changed-98619b24-0318-422e-90bb-ed8db3309905. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1734.735816] env[63241]: DEBUG oslo_concurrency.lockutils [req-e93ba84e-355e-4b16-93b2-fa3a30c2b1fe req-e5af1c0e-c4c1-4de2-adce-3f690e0e59a9 service nova] Acquiring lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.738835] env[63241]: DEBUG nova.compute.manager [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1734.871911] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.877609] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.878180] env[63241]: DEBUG nova.compute.manager [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1734.881044] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.257s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.881044] env[63241]: DEBUG nova.objects.instance [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lazy-loading 'resources' on Instance uuid 73ea6bff-60da-4691-a569-f4e9ae92f701 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1734.905905] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.995844] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 5fce9350-6d45-4bfb-a74b-f5b384ecb16c] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1735.001787] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Releasing lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.002033] env[63241]: DEBUG nova.compute.manager [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Inject network info {{(pid=63241) _inject_network_info 
/opt/stack/nova/nova/compute/manager.py:7220}} [ 1735.002275] env[63241]: DEBUG nova.compute.manager [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] network_info to inject: |[{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1735.007129] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Reconfiguring VM instance to set the machine id {{(pid=63241) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1735.007450] env[63241]: DEBUG oslo_concurrency.lockutils [req-e93ba84e-355e-4b16-93b2-fa3a30c2b1fe req-e5af1c0e-c4c1-4de2-adce-3f690e0e59a9 service nova] Acquired lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.007789] env[63241]: DEBUG nova.network.neutron [req-e93ba84e-355e-4b16-93b2-fa3a30c2b1fe req-e5af1c0e-c4c1-4de2-adce-3f690e0e59a9 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Refreshing network info cache for port 98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1735.009066] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-857ef47f-66b0-41bc-b54a-5c54c2bc609e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.029937] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820806, 'name': Rename_Task, 'duration_secs': 0.235661} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.031485] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1735.031804] env[63241]: DEBUG oslo_vmware.api [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1735.031804] env[63241]: value = "task-1820807" [ 1735.031804] env[63241]: _type = "Task" [ 1735.031804] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.031984] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-060f8803-b8f0-4be9-affc-453824956baf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.045735] env[63241]: DEBUG oslo_vmware.api [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820807, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.046058] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1735.046058] env[63241]: value = "task-1820808" [ 1735.046058] env[63241]: _type = "Task" [ 1735.046058] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.054472] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820808, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.189998] env[63241]: DEBUG nova.compute.manager [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1735.211233] env[63241]: DEBUG nova.network.neutron [req-b79c5739-124f-4d8f-bf8e-3a106a8fc7bc req-48157d38-4a5b-4c9f-8b66-4a558fe341d9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updated VIF entry in instance network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1735.211656] env[63241]: DEBUG nova.network.neutron [req-b79c5739-124f-4d8f-bf8e-3a106a8fc7bc req-48157d38-4a5b-4c9f-8b66-4a558fe341d9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updating instance_info_cache with network_info: [{"id": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "address": "fa:16:3e:d2:d7:ea", "network": {"id": "d35148d1-0dba-4e39-9bc1-ad561f93e9f8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1579993511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e7a8a40a56ee42dca4190ac78e5f22ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac5cae6-11", "ovs_interfaceid": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.248753] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "7f1710d0-857d-41fc-8151-8c5e129dda08" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.249219] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.249448] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "7f1710d0-857d-41fc-8151-8c5e129dda08-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.249638] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.249817] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 
tempest-MigrationsAdminTest-801062833-project-member] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.253408] env[63241]: INFO nova.compute.manager [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Terminating instance [ 1735.255154] env[63241]: DEBUG nova.compute.manager [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1735.255364] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1735.256299] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3cc134-1206-4d9b-8789-9b4ffd28f0ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.264929] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1735.265994] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.266248] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bc73c2e-00e5-42a8-99ab-3e3a73355888 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.272922] env[63241]: DEBUG oslo_vmware.api [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1735.272922] env[63241]: value = "task-1820809" [ 1735.272922] env[63241]: _type = "Task" [ 1735.272922] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.281571] env[63241]: DEBUG oslo_vmware.api [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820809, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.344103] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-15a93d52-2bf4-4998-9189-94c3ba81a5ae" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.344396] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-15a93d52-2bf4-4998-9189-94c3ba81a5ae" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.344814] env[63241]: DEBUG nova.objects.instance [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'flavor' on Instance uuid 9d301157-6870-4452-9ae6-0d45c4338886 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1735.384022] env[63241]: DEBUG nova.compute.utils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1735.399662] env[63241]: DEBUG nova.compute.manager [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1735.399662] env[63241]: DEBUG nova.network.neutron [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1735.443359] env[63241]: DEBUG nova.policy [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8f8e170296b46d6a108092608492772', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e139fb67702e42d8a8b2401cc6be9303', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1735.499607] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 11b1888e-95ec-4166-9219-0c38f8817dd4] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1735.544283] env[63241]: DEBUG oslo_vmware.api [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820807, 'name': ReconfigVM_Task, 'duration_secs': 0.1678} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.547244] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-111d2e65-3b4e-457d-a40f-86f3dc1a2725 tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Reconfigured VM instance to set the machine id {{(pid=63241) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1735.564647] env[63241]: DEBUG oslo_vmware.api [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820808, 'name': PowerOnVM_Task, 'duration_secs': 0.465126} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.568653] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1735.568932] env[63241]: INFO nova.compute.manager [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Took 9.01 seconds to spawn the instance on the hypervisor. 
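Annotation: the task-handling entries above (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follow the usual oslo.vmware pattern: invoke an asynchronous vSphere call, get back a Task managed-object reference, then poll it until it reaches a terminal state. A minimal sketch of that pattern, assuming an established oslo_vmware.api.VMwareAPISession and a vm_ref obtained elsewhere; the host, credentials and vm_ref below are placeholders, not values taken from this log:

    from oslo_vmware import api

    def power_on(session, vm_ref):
        # Invoke the asynchronous vSphere call; vCenter returns a Task
        # managed-object reference (e.g. "task-1820808" above) immediately.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task -- the "_poll_task ... progress is N%"
        # lines -- and returns its result once the task state is "success",
        # raising an oslo_vmware exception otherwise.
        return session.wait_for_task(task)

    if __name__ == '__main__':
        # Constructing the session logs in to vCenter; endpoint and
        # credentials here are placeholders.
        session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                       api_retry_count=10,
                                       task_poll_interval=0.5)
        # power_on(session, vm_ref)  # vm_ref must come from a prior lookup

The task_poll_interval passed to the session is what spaces out the repeated progress lines between "Waiting for the task" and "completed successfully".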
[ 1735.570567] env[63241]: DEBUG nova.compute.manager [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1735.575037] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b510796-0bb8-40ab-a21d-aa76f628f025 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.714185] env[63241]: DEBUG oslo_concurrency.lockutils [req-b79c5739-124f-4d8f-bf8e-3a106a8fc7bc req-48157d38-4a5b-4c9f-8b66-4a558fe341d9 service nova] Releasing lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.716470] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.787399] env[63241]: DEBUG oslo_vmware.api [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820809, 'name': PowerOffVM_Task, 'duration_secs': 0.212876} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.787670] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1735.787841] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1735.788097] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2840c844-16bf-4c4c-b499-73d232e4356b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.811551] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b725ab-b30e-47df-8b9a-7eef0f46dc18 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.823860] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f335d53d-4771-4413-9a57-1212138bb476 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.864560] env[63241]: DEBUG nova.network.neutron [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Successfully created 
port: cf92201f-3cfa-4edb-b9dd-b305a4c37115 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1735.866681] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44aa6a24-5815-49de-9212-a248e0655f8e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.875284] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dff48a7-3165-4c2d-ae3e-3da639e33dac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.890317] env[63241]: DEBUG nova.compute.provider_tree [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1735.895156] env[63241]: DEBUG nova.compute.manager [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1735.926622] env[63241]: DEBUG nova.network.neutron [req-e93ba84e-355e-4b16-93b2-fa3a30c2b1fe req-e5af1c0e-c4c1-4de2-adce-3f690e0e59a9 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updated VIF entry in instance network info cache for port 98619b24-0318-422e-90bb-ed8db3309905. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1735.927012] env[63241]: DEBUG nova.network.neutron [req-e93ba84e-355e-4b16-93b2-fa3a30c2b1fe req-e5af1c0e-c4c1-4de2-adce-3f690e0e59a9 service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updating instance_info_cache with network_info: [{"id": "98619b24-0318-422e-90bb-ed8db3309905", "address": "fa:16:3e:2b:bb:d0", "network": {"id": "7b2cb854-43f8-4d73-b16a-16b0e34b29e4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1952186645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89b38fc89ac4f039a89fb9bf42dbc5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98619b24-03", "ovs_interfaceid": "98619b24-0318-422e-90bb-ed8db3309905", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.951818] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 
tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1735.952333] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1735.952333] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Deleting the datastore file [datastore1] 7f1710d0-857d-41fc-8151-8c5e129dda08 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1735.953072] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e84d79f0-9b66-41b5-8349-5796fb0aab77 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.959869] env[63241]: DEBUG oslo_vmware.api [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1735.959869] env[63241]: value = "task-1820811" [ 1735.959869] env[63241]: _type = "Task" [ 1735.959869] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.972144] env[63241]: DEBUG oslo_vmware.api [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820811, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.003993] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: c8e94dd5-64eb-4c4f-bb45-b49e468cb5c3] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1736.008419] env[63241]: DEBUG nova.objects.instance [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'pci_requests' on Instance uuid 9d301157-6870-4452-9ae6-0d45c4338886 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1736.095635] env[63241]: INFO nova.compute.manager [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Took 18.05 seconds to build instance. 
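Annotation: the "Acquiring lock ... by ...", "acquired ... :: waited Ns" and ""released" ... :: held Ns" lines throughout this section come from oslo.concurrency's lock helpers guarding the per-instance build path, the resource tracker's "compute_resources" section, and the "refresh_cache-<uuid>" network-cache updates. A minimal sketch of both forms, using the public lockutils API with illustrative lock names rather than ones taken from this log:

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers of the wrapped function on the named
    # lock and logs how long each caller waited for it and then held it.
    @lockutils.synchronized('build-and-run-example-instance')
    def _locked_do_build_and_run_instance():
        pass  # build steps would run here, one caller at a time per process

    # Context-manager form, as used for the shorter "refresh_cache-<uuid>" and
    # "compute_resources" style critical sections.
    def update_usage_example():
        with lockutils.lock('compute_resources'):
            pass  # resource-tracker bookkeeping would run here

    if __name__ == '__main__':
        _locked_do_build_and_run_instance()
        update_usage_example()

Both helpers default to process-local semaphores; the waited/held timings in the log are emitted by the synchronized wrapper's inner function, which is why the entries reference "inner ... lockutils.py".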
[ 1736.393606] env[63241]: DEBUG nova.scheduler.client.report [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1736.434047] env[63241]: DEBUG oslo_concurrency.lockutils [req-e93ba84e-355e-4b16-93b2-fa3a30c2b1fe req-e5af1c0e-c4c1-4de2-adce-3f690e0e59a9 service nova] Releasing lock "refresh_cache-e3842404-2c80-4fa9-b0c9-c58c484845a2" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.475964] env[63241]: DEBUG oslo_vmware.api [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164068} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.475964] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1736.476247] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1736.476668] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1736.477230] env[63241]: INFO nova.compute.manager [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1736.477733] env[63241]: DEBUG oslo.service.loopingcall [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1736.478614] env[63241]: DEBUG nova.compute.manager [-] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1736.478856] env[63241]: DEBUG nova.network.neutron [-] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1736.514702] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0c72c98b-57f0-44e5-9159-490b27eac3a6] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1736.519398] env[63241]: DEBUG nova.objects.base [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Object Instance<9d301157-6870-4452-9ae6-0d45c4338886> lazy-loaded attributes: flavor,pci_requests {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1736.519605] env[63241]: DEBUG nova.network.neutron [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1736.597625] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1483d12b-38d3-46d0-8987-02edd2881f70 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.562s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.657688] env[63241]: DEBUG nova.policy [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54dc853b6f204a75ae7612f9fbd2d1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecafb0abbdc74501b22b20b797c4c60c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1736.900224] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.019s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.904803] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.462s 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.909589] env[63241]: INFO nova.compute.claims [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1736.917505] env[63241]: DEBUG nova.compute.manager [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1736.917505] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "e3842404-2c80-4fa9-b0c9-c58c484845a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.917948] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "e3842404-2c80-4fa9-b0c9-c58c484845a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.917948] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "e3842404-2c80-4fa9-b0c9-c58c484845a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.918104] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "e3842404-2c80-4fa9-b0c9-c58c484845a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.918342] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "e3842404-2c80-4fa9-b0c9-c58c484845a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.920299] env[63241]: INFO nova.compute.manager [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Terminating instance [ 1736.922357] env[63241]: DEBUG nova.compute.manager [None 
req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1736.922578] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1736.923904] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b8c009-1ff0-45bc-b751-01b11712a551 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.932129] env[63241]: DEBUG nova.compute.manager [req-b215a9ab-ef76-46dc-9e0e-97db5a0ed60e req-c91ebf78-ca9f-4a66-a3cd-07c921f436e9 service nova] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Received event network-vif-deleted-febed88f-91b2-4546-82de-5dd1a1f73020 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1736.932450] env[63241]: DEBUG nova.compute.manager [req-b215a9ab-ef76-46dc-9e0e-97db5a0ed60e req-c91ebf78-ca9f-4a66-a3cd-07c921f436e9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Received event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1736.932669] env[63241]: DEBUG nova.compute.manager [req-b215a9ab-ef76-46dc-9e0e-97db5a0ed60e req-c91ebf78-ca9f-4a66-a3cd-07c921f436e9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing instance network info cache due to event network-changed-aac5cae6-1124-4f0a-9270-ff1f4982fff4. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1736.932924] env[63241]: DEBUG oslo_concurrency.lockutils [req-b215a9ab-ef76-46dc-9e0e-97db5a0ed60e req-c91ebf78-ca9f-4a66-a3cd-07c921f436e9 service nova] Acquiring lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.933121] env[63241]: DEBUG oslo_concurrency.lockutils [req-b215a9ab-ef76-46dc-9e0e-97db5a0ed60e req-c91ebf78-ca9f-4a66-a3cd-07c921f436e9 service nova] Acquired lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.933328] env[63241]: DEBUG nova.network.neutron [req-b215a9ab-ef76-46dc-9e0e-97db5a0ed60e req-c91ebf78-ca9f-4a66-a3cd-07c921f436e9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Refreshing network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1736.938422] env[63241]: INFO nova.scheduler.client.report [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Deleted allocations for instance 73ea6bff-60da-4691-a569-f4e9ae92f701 [ 1736.947276] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1736.949427] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d3bab37-8124-470c-806e-737a558b622a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.957359] env[63241]: DEBUG oslo_vmware.api [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1736.957359] env[63241]: value = "task-1820812" [ 1736.957359] env[63241]: _type = "Task" [ 1736.957359] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.962896] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1736.963201] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1736.963399] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1736.963618] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1736.963797] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1736.963974] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1736.964254] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1736.964501] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1736.964716] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1736.965131] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1736.965369] env[63241]: DEBUG nova.virt.hardware [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1736.967145] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725947de-d279-4210-8b4b-fe65b207ed54 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.982144] env[63241]: DEBUG oslo_vmware.api [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.985904] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b76d49-7bee-4a9c-b2e6-d567e4c27e9a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.999643] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "f65e5b00-38b5-4453-b370-1f56f18053eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.999895] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "f65e5b00-38b5-4453-b370-1f56f18053eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.000167] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "f65e5b00-38b5-4453-b370-1f56f18053eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.000329] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "f65e5b00-38b5-4453-b370-1f56f18053eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.000513] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "f65e5b00-38b5-4453-b370-1f56f18053eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.003166] env[63241]: INFO nova.compute.manager [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Terminating instance [ 1737.005041] env[63241]: DEBUG nova.compute.manager [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1737.005285] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1737.006069] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eaa17be-0fbd-46ef-893d-651ad063c392 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.014321] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1737.014518] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69d6bd84-16a7-4853-b959-ebb45447d5c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.017507] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: eb506425-4ecc-44b7-afa4-0901fc60b04f] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1737.020400] env[63241]: DEBUG oslo_vmware.api [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1737.020400] env[63241]: value = "task-1820813" [ 1737.020400] env[63241]: _type = "Task" [ 1737.020400] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.028235] env[63241]: DEBUG oslo_vmware.api [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820813, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.303153] env[63241]: DEBUG nova.network.neutron [-] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.454567] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2f92c734-c95a-4161-addd-6bd9f631c799 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "73ea6bff-60da-4691-a569-f4e9ae92f701" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.277s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.471210] env[63241]: DEBUG oslo_vmware.api [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820812, 'name': PowerOffVM_Task, 'duration_secs': 0.212663} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.471559] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1737.471763] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1737.472339] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0563c6b4-577e-43e1-9ca3-79a50225d0d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.520693] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 3c51d4dc-5a2c-4483-9aa5-8bab532971d4] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1737.532495] env[63241]: DEBUG oslo_vmware.api [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820813, 'name': PowerOffVM_Task, 'duration_secs': 0.288529} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.532878] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1737.533149] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1737.533483] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3f8ca55-77be-4599-8572-6cc5e3f19320 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.568595] env[63241]: DEBUG nova.network.neutron [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Successfully updated port: cf92201f-3cfa-4edb-b9dd-b305a4c37115 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1737.607240] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1737.607485] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1737.607666] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Deleting the datastore file [datastore1] e3842404-2c80-4fa9-b0c9-c58c484845a2 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1737.607916] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6c48796-54fa-49bf-a30a-eabe06b69a90 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.618263] env[63241]: DEBUG oslo_vmware.api [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for the task: (returnval){ [ 1737.618263] env[63241]: value = "task-1820816" [ 1737.618263] env[63241]: _type = "Task" [ 1737.618263] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.627195] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1737.627418] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1737.627598] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Deleting the datastore file [datastore1] f65e5b00-38b5-4453-b370-1f56f18053eb {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1737.628771] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a98756a-1721-4c49-bb55-9cb6d02b6cd1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.631265] env[63241]: DEBUG nova.compute.manager [req-9739020c-753d-496f-b188-255ce30dd145 req-e903684f-3602-41b9-acca-702e1c999d8d service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Received event network-vif-plugged-cf92201f-3cfa-4edb-b9dd-b305a4c37115 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1737.631466] env[63241]: DEBUG oslo_concurrency.lockutils [req-9739020c-753d-496f-b188-255ce30dd145 req-e903684f-3602-41b9-acca-702e1c999d8d service nova] Acquiring lock "b7378019-a572-4d4d-a82d-cee13a1b6a88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.631672] env[63241]: DEBUG oslo_concurrency.lockutils [req-9739020c-753d-496f-b188-255ce30dd145 req-e903684f-3602-41b9-acca-702e1c999d8d service nova] Lock "b7378019-a572-4d4d-a82d-cee13a1b6a88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.631839] env[63241]: DEBUG oslo_concurrency.lockutils [req-9739020c-753d-496f-b188-255ce30dd145 req-e903684f-3602-41b9-acca-702e1c999d8d service nova] Lock "b7378019-a572-4d4d-a82d-cee13a1b6a88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.632013] env[63241]: DEBUG nova.compute.manager [req-9739020c-753d-496f-b188-255ce30dd145 req-e903684f-3602-41b9-acca-702e1c999d8d service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] No waiting events found dispatching network-vif-plugged-cf92201f-3cfa-4edb-b9dd-b305a4c37115 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1737.632196] env[63241]: WARNING nova.compute.manager 
[req-9739020c-753d-496f-b188-255ce30dd145 req-e903684f-3602-41b9-acca-702e1c999d8d service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Received unexpected event network-vif-plugged-cf92201f-3cfa-4edb-b9dd-b305a4c37115 for instance with vm_state building and task_state spawning. [ 1737.635846] env[63241]: DEBUG oslo_vmware.api [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.639937] env[63241]: DEBUG oslo_vmware.api [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for the task: (returnval){ [ 1737.639937] env[63241]: value = "task-1820817" [ 1737.639937] env[63241]: _type = "Task" [ 1737.639937] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.648481] env[63241]: DEBUG oslo_vmware.api [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820817, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.731926] env[63241]: DEBUG nova.network.neutron [req-b215a9ab-ef76-46dc-9e0e-97db5a0ed60e req-c91ebf78-ca9f-4a66-a3cd-07c921f436e9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updated VIF entry in instance network info cache for port aac5cae6-1124-4f0a-9270-ff1f4982fff4. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1737.732304] env[63241]: DEBUG nova.network.neutron [req-b215a9ab-ef76-46dc-9e0e-97db5a0ed60e req-c91ebf78-ca9f-4a66-a3cd-07c921f436e9 service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updating instance_info_cache with network_info: [{"id": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "address": "fa:16:3e:d2:d7:ea", "network": {"id": "d35148d1-0dba-4e39-9bc1-ad561f93e9f8", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1579993511-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e7a8a40a56ee42dca4190ac78e5f22ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c2b89fb-df8c-47c0-83ae-44291236feb4", "external-id": "nsx-vlan-transportzone-174", "segmentation_id": 174, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaac5cae6-11", "ovs_interfaceid": "aac5cae6-1124-4f0a-9270-ff1f4982fff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.806780] env[63241]: INFO nova.compute.manager [-] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Took 1.33 seconds to deallocate network for instance. [ 1737.847076] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "29b6caa8-a07c-494b-b776-b08affa45c87" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.848197] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.025318] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: a88ba00d-6644-4ecc-8603-a7d79ce8a4b4] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1738.072588] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "refresh_cache-b7378019-a572-4d4d-a82d-cee13a1b6a88" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.072981] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired 
lock "refresh_cache-b7378019-a572-4d4d-a82d-cee13a1b6a88" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.074504] env[63241]: DEBUG nova.network.neutron [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1738.129541] env[63241]: DEBUG oslo_vmware.api [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Task: {'id': task-1820816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16225} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.129877] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1738.131827] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1738.131827] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1738.131827] env[63241]: INFO nova.compute.manager [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1738.131827] env[63241]: DEBUG oslo.service.loopingcall [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1738.131827] env[63241]: DEBUG nova.compute.manager [-] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1738.131827] env[63241]: DEBUG nova.network.neutron [-] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1738.154185] env[63241]: DEBUG oslo_vmware.api [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Task: {'id': task-1820817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210048} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.158080] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1738.158389] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1738.158781] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1738.158935] env[63241]: INFO nova.compute.manager [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1738.159323] env[63241]: DEBUG oslo.service.loopingcall [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1738.161581] env[63241]: DEBUG nova.compute.manager [-] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1738.161748] env[63241]: DEBUG nova.network.neutron [-] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1738.236284] env[63241]: DEBUG oslo_concurrency.lockutils [req-b215a9ab-ef76-46dc-9e0e-97db5a0ed60e req-c91ebf78-ca9f-4a66-a3cd-07c921f436e9 service nova] Releasing lock "refresh_cache-f65e5b00-38b5-4453-b370-1f56f18053eb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.295893] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c08068-010b-4ecf-8735-cb9d1901d55d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.304542] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6b6d97-3130-455f-b8cb-4e48a80c4ae2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.312652] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.341433] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28099ca1-eee7-42bb-a2e9-35b537e9d703 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.349539] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a72e5b-1be3-4f49-a27f-27b2df1abd28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.354837] env[63241]: DEBUG nova.compute.utils [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1738.370353] env[63241]: DEBUG nova.compute.provider_tree [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1738.426318] env[63241]: DEBUG nova.network.neutron [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 
tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Successfully updated port: 15a93d52-2bf4-4998-9189-94c3ba81a5ae {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1738.529767] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 99eccbef-0e76-4532-af2f-5d74e563e1d2] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1738.606277] env[63241]: DEBUG nova.network.neutron [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1738.703504] env[63241]: DEBUG nova.compute.manager [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Stashing vm_state: active {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1738.775790] env[63241]: DEBUG nova.network.neutron [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Updating instance_info_cache with network_info: [{"id": "cf92201f-3cfa-4edb-b9dd-b305a4c37115", "address": "fa:16:3e:fd:b7:2f", "network": {"id": "1e0ff0ab-bb23-4187-abf3-c1d13c2971ac", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-634449129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e139fb67702e42d8a8b2401cc6be9303", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf92201f-3c", "ovs_interfaceid": "cf92201f-3cfa-4edb-b9dd-b305a4c37115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.858455] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.894337] env[63241]: ERROR nova.scheduler.client.report [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] 
[req-03ef7e38-8aa1-4b38-aaf9-042db20f4985] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-03ef7e38-8aa1-4b38-aaf9-042db20f4985"}]} [ 1738.911327] env[63241]: DEBUG nova.scheduler.client.report [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1738.926320] env[63241]: DEBUG nova.scheduler.client.report [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1738.927379] env[63241]: DEBUG nova.compute.provider_tree [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1738.932202] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.932202] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.932202] env[63241]: DEBUG nova.network.neutron [None req-ff525236-006c-4c04-aecd-82685b31ae02 
tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1738.943174] env[63241]: DEBUG nova.scheduler.client.report [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1738.962848] env[63241]: DEBUG nova.scheduler.client.report [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1739.030290] env[63241]: DEBUG nova.network.neutron [-] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.034137] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 44508cc6-c576-4c30-8559-75118ceba02a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1739.221435] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.260835] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2225388-e4a7-4e35-9fa7-8a1697bbbf0d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.270386] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4ca35e-1415-4796-a4f2-fcfdecbe510f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.307070] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Releasing lock "refresh_cache-b7378019-a572-4d4d-a82d-cee13a1b6a88" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.307437] env[63241]: DEBUG nova.compute.manager [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Instance network_info: |[{"id": "cf92201f-3cfa-4edb-b9dd-b305a4c37115", "address": "fa:16:3e:fd:b7:2f", "network": {"id": "1e0ff0ab-bb23-4187-abf3-c1d13c2971ac", "bridge": "br-int", 
"label": "tempest-ImagesOneServerNegativeTestJSON-634449129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e139fb67702e42d8a8b2401cc6be9303", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf92201f-3c", "ovs_interfaceid": "cf92201f-3cfa-4edb-b9dd-b305a4c37115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1739.307754] env[63241]: DEBUG nova.network.neutron [-] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.309280] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:b7:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf92201f-3cfa-4edb-b9dd-b305a4c37115', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1739.318505] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Creating folder: Project (e139fb67702e42d8a8b2401cc6be9303). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1739.319897] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9b8b0a-fad5-4efb-8054-962820ed12bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.323585] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ff28082-8d6b-4809-8827-fcde41659888 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.333294] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9ad2a9-3c68-4863-9384-241e5f0fdba9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.338853] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Created folder: Project (e139fb67702e42d8a8b2401cc6be9303) in parent group-v376927. 
[ 1739.339066] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Creating folder: Instances. Parent ref: group-v377155. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1739.339706] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8063e740-c608-4c29-bfcf-b9aec158a7d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.351454] env[63241]: DEBUG nova.compute.provider_tree [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1739.362057] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Created folder: Instances in parent group-v377155. [ 1739.362316] env[63241]: DEBUG oslo.service.loopingcall [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1739.362513] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1739.362728] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-935a2126-0c61-4fb0-a431-f6564bf42615 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.382960] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1739.382960] env[63241]: value = "task-1820820" [ 1739.382960] env[63241]: _type = "Task" [ 1739.382960] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.392058] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820820, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.486416] env[63241]: WARNING nova.network.neutron [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] cafd3d43-975a-4836-8948-2f1b47e56666 already exists in list: networks containing: ['cafd3d43-975a-4836-8948-2f1b47e56666']. 
ignoring it [ 1739.486730] env[63241]: WARNING nova.network.neutron [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] cafd3d43-975a-4836-8948-2f1b47e56666 already exists in list: networks containing: ['cafd3d43-975a-4836-8948-2f1b47e56666']. ignoring it [ 1739.537158] env[63241]: INFO nova.compute.manager [-] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Took 1.38 seconds to deallocate network for instance. [ 1739.537593] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: a1f24cfe-88f0-4e73-9ade-2dcf907848a1] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1739.823505] env[63241]: INFO nova.compute.manager [-] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Took 1.69 seconds to deallocate network for instance. [ 1739.892128] env[63241]: DEBUG nova.scheduler.client.report [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 133 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1739.892596] env[63241]: DEBUG nova.compute.provider_tree [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 133 to 134 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1739.892879] env[63241]: DEBUG nova.compute.provider_tree [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1739.904506] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820820, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.933017] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "29b6caa8-a07c-494b-b776-b08affa45c87" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.933728] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.935881] env[63241]: INFO nova.compute.manager [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Attaching volume 7f85c901-777e-4254-8502-a75d490b1a9e to /dev/sdb [ 1739.976953] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34c4af8-6440-453e-a934-6c9f5c4d52eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.990930] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a642a405-7f74-4627-8aeb-3db95bec4073 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.009508] env[63241]: DEBUG nova.virt.block_device [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Updating existing volume attachment record: 6079f1fa-508c-4073-bc9b-ba04687e5401 {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1740.042182] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 97890eda-0c1d-4423-acd2-60d3097c6f8a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1740.044879] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.081740] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.081740] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 
tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.081740] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.081986] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.082420] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.085392] env[63241]: INFO nova.compute.manager [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Terminating instance [ 1740.087782] env[63241]: DEBUG nova.compute.manager [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1740.088967] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1740.088967] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e322e312-bc93-428d-88b3-aa709204e778 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.092969] env[63241]: DEBUG nova.network.neutron [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ceb9556a-7bff-425f-bfd2-a6dba68ba438", "address": "fa:16:3e:f1:59:43", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceb9556a-7b", "ovs_interfaceid": "ceb9556a-7bff-425f-bfd2-a6dba68ba438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "15a93d52-2bf4-4998-9189-94c3ba81a5ae", "address": "fa:16:3e:69:de:6c", "network": {"id": 
"cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15a93d52-2b", "ovs_interfaceid": "15a93d52-2bf4-4998-9189-94c3ba81a5ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.099033] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1740.099282] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f73a4091-88fa-4fb4-a2a7-b4f7f535de41 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.106360] env[63241]: DEBUG oslo_vmware.api [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1740.106360] env[63241]: value = "task-1820821" [ 1740.106360] env[63241]: _type = "Task" [ 1740.106360] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.115354] env[63241]: DEBUG oslo_vmware.api [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820821, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.331240] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.396661] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820820, 'name': CreateVM_Task, 'duration_secs': 0.525128} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.396836] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1740.397557] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.398193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.398193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1740.398557] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c606361-366b-4ddf-9938-b605bd688e62 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.400450] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.497s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.404163] env[63241]: DEBUG nova.compute.manager [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1740.404163] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.532s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.404834] env[63241]: INFO nova.compute.claims [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1740.410176] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1740.410176] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528e487b-b3e7-a8eb-70dc-4d5ec6703b89" [ 1740.410176] env[63241]: _type = "Task" [ 1740.410176] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.418262] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528e487b-b3e7-a8eb-70dc-4d5ec6703b89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.472331] env[63241]: DEBUG nova.compute.manager [req-92b84c80-eaa2-4d21-83f8-1c37091bbe9a req-4a00cca0-fd39-454a-afe6-c9a265ff0f9b service nova] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Received event network-vif-deleted-56f1b482-fc2c-45e5-9aca-99ff209a166e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1740.472537] env[63241]: DEBUG nova.compute.manager [req-92b84c80-eaa2-4d21-83f8-1c37091bbe9a req-4a00cca0-fd39-454a-afe6-c9a265ff0f9b service nova] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Received event network-vif-deleted-98619b24-0318-422e-90bb-ed8db3309905 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1740.546700] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: fbbb7682-873d-4bb0-8d39-4aec3566b0af] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1740.597041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.597154] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.597331] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.598318] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66c0992-d176-44e3-9418-f5bba7af80c1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.617483] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1740.617735] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1740.617917] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1740.618135] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1740.618303] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1740.618494] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1740.618732] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1740.618909] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1740.619106] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1740.619286] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1740.619473] env[63241]: DEBUG nova.virt.hardware [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1740.626256] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Reconfiguring VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1740.630020] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db5d69b3-ec6d-43cb-8330-010081ef2815 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.648992] env[63241]: DEBUG oslo_vmware.api [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820821, 'name': PowerOffVM_Task, 'duration_secs': 0.195984} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.653323] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1740.653590] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1740.654209] env[63241]: DEBUG oslo_vmware.api [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1740.654209] env[63241]: value = "task-1820825" [ 1740.654209] env[63241]: _type = "Task" [ 1740.654209] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.654419] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66ebdfae-5ca4-482d-89d9-1a92376bf66b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.669699] env[63241]: DEBUG oslo_vmware.api [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820825, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.784794] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1740.785061] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1740.785230] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Deleting the datastore file [datastore1] f372d405-f7d5-4e5f-8c36-fe9651af2a0d {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1740.786776] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0adb522-e04f-4511-a274-33ec7cd87135 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.792587] env[63241]: DEBUG oslo_vmware.api [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for the task: (returnval){ [ 1740.792587] env[63241]: value = "task-1820827" [ 1740.792587] env[63241]: _type = "Task" [ 1740.792587] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.800813] env[63241]: DEBUG oslo_vmware.api [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820827, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.894619] env[63241]: DEBUG nova.compute.manager [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Received event network-changed-cf92201f-3cfa-4edb-b9dd-b305a4c37115 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1740.894825] env[63241]: DEBUG nova.compute.manager [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Refreshing instance network info cache due to event network-changed-cf92201f-3cfa-4edb-b9dd-b305a4c37115. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1740.895740] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] Acquiring lock "refresh_cache-b7378019-a572-4d4d-a82d-cee13a1b6a88" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.895740] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] Acquired lock "refresh_cache-b7378019-a572-4d4d-a82d-cee13a1b6a88" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.895740] env[63241]: DEBUG nova.network.neutron [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Refreshing network info cache for port cf92201f-3cfa-4edb-b9dd-b305a4c37115 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1740.910170] env[63241]: DEBUG nova.compute.utils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1740.911734] env[63241]: DEBUG nova.compute.manager [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1740.911928] env[63241]: DEBUG nova.network.neutron [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1740.927078] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528e487b-b3e7-a8eb-70dc-4d5ec6703b89, 'name': SearchDatastore_Task, 'duration_secs': 0.010183} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.927277] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.927399] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1740.927633] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.927773] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.927981] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1740.928512] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d59e0189-c809-471a-bb44-03ff89b9162b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.937403] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1740.937627] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1740.938460] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfc1c926-80a5-4d59-92a6-7990ff05a337 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.943939] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1740.943939] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5249aea4-7ce9-ae44-a8d9-bf59006a9aeb" [ 1740.943939] env[63241]: _type = "Task" [ 1740.943939] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.952673] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5249aea4-7ce9-ae44-a8d9-bf59006a9aeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.020359] env[63241]: DEBUG nova.policy [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f52bf5741a490c83e01e06f686559e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c64d07a686b414f93ec4c599307498f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1741.050343] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0e5447fd-a04f-4bc2-b329-e015883773b8] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1741.166417] env[63241]: DEBUG oslo_vmware.api [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820825, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.303758] env[63241]: DEBUG oslo_vmware.api [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Task: {'id': task-1820827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158015} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.303990] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1741.303990] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1741.304219] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1741.304425] env[63241]: INFO nova.compute.manager [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1741.304702] env[63241]: DEBUG oslo.service.loopingcall [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1741.304929] env[63241]: DEBUG nova.compute.manager [-] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1741.307160] env[63241]: DEBUG nova.network.neutron [-] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1741.417590] env[63241]: DEBUG nova.compute.manager [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1741.459794] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5249aea4-7ce9-ae44-a8d9-bf59006a9aeb, 'name': SearchDatastore_Task, 'duration_secs': 0.01086} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.467206] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0f2eb46-0cf5-452e-836e-9e4ab707f5df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.478469] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1741.478469] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e3e2f0-a8cc-82f1-bc0c-0d078b28d570" [ 1741.478469] env[63241]: _type = "Task" [ 1741.478469] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.493443] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e3e2f0-a8cc-82f1-bc0c-0d078b28d570, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.555902] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: a1a8342a-b00e-42c1-8c01-a95659a78caf] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1741.670718] env[63241]: DEBUG oslo_vmware.api [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820825, 'name': ReconfigVM_Task, 'duration_secs': 0.635358} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.671232] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.671630] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Reconfigured VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1741.751147] env[63241]: DEBUG nova.network.neutron [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Updated VIF entry in instance network info cache for port cf92201f-3cfa-4edb-b9dd-b305a4c37115. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1741.751460] env[63241]: DEBUG nova.network.neutron [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Updating instance_info_cache with network_info: [{"id": "cf92201f-3cfa-4edb-b9dd-b305a4c37115", "address": "fa:16:3e:fd:b7:2f", "network": {"id": "1e0ff0ab-bb23-4187-abf3-c1d13c2971ac", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-634449129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e139fb67702e42d8a8b2401cc6be9303", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf92201f-3c", "ovs_interfaceid": "cf92201f-3cfa-4edb-b9dd-b305a4c37115", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.793278] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0be146-2d8d-4330-b0a3-eeb5b28e43cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.801458] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc1b8a1-91fa-4961-82ee-f14f8a919c42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.837381] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fdff67-b03a-4fbf-ba37-3ee3e32f6ad3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.845448] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2cb56c-a843-46a7-a54d-d3d8a9822587 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.860338] env[63241]: DEBUG nova.compute.provider_tree [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1741.894459] env[63241]: DEBUG nova.network.neutron [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Successfully created port: ead55549-b686-405a-a2d0-8a995905158b {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1741.992805] env[63241]: DEBUG oslo_vmware.api [None 
req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e3e2f0-a8cc-82f1-bc0c-0d078b28d570, 'name': SearchDatastore_Task, 'duration_secs': 0.051491} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.992975] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.993246] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b7378019-a572-4d4d-a82d-cee13a1b6a88/b7378019-a572-4d4d-a82d-cee13a1b6a88.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1741.993501] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8d4266f-8883-4d27-b7d4-cbcde704bdfc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.000229] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1742.000229] env[63241]: value = "task-1820828" [ 1742.000229] env[63241]: _type = "Task" [ 1742.000229] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.007933] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820828, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.059203] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 780f3eee-f6c7-4054-8e6e-a370f74dc405] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1742.182255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ff525236-006c-4c04-aecd-82685b31ae02 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-15a93d52-2bf4-4998-9189-94c3ba81a5ae" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.837s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.254325] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] Releasing lock "refresh_cache-b7378019-a572-4d4d-a82d-cee13a1b6a88" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.254704] env[63241]: DEBUG nova.compute.manager [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-vif-plugged-15a93d52-2bf4-4998-9189-94c3ba81a5ae {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1742.254981] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.255719] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] Lock "9d301157-6870-4452-9ae6-0d45c4338886-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.255719] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] Lock "9d301157-6870-4452-9ae6-0d45c4338886-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.255927] env[63241]: DEBUG nova.compute.manager [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] No waiting events found dispatching network-vif-plugged-15a93d52-2bf4-4998-9189-94c3ba81a5ae {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1742.256230] env[63241]: WARNING nova.compute.manager [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received unexpected event network-vif-plugged-15a93d52-2bf4-4998-9189-94c3ba81a5ae for instance with vm_state active and task_state None. 
[ 1742.256492] env[63241]: DEBUG nova.compute.manager [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-changed-15a93d52-2bf4-4998-9189-94c3ba81a5ae {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1742.256697] env[63241]: DEBUG nova.compute.manager [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Refreshing instance network info cache due to event network-changed-15a93d52-2bf4-4998-9189-94c3ba81a5ae. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1742.256904] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] Acquiring lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.257315] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] Acquired lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.257650] env[63241]: DEBUG nova.network.neutron [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Refreshing network info cache for port 15a93d52-2bf4-4998-9189-94c3ba81a5ae {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1742.269833] env[63241]: DEBUG nova.network.neutron [-] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1742.363542] env[63241]: DEBUG nova.scheduler.client.report [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1742.427935] env[63241]: DEBUG nova.compute.manager [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1742.472842] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1742.474246] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1742.474246] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1742.474666] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1742.474666] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1742.474666] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1742.474911] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1742.475021] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1742.475205] env[63241]: DEBUG nova.virt.hardware [None 
req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1742.475374] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1742.475670] env[63241]: DEBUG nova.virt.hardware [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1742.476659] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f9bccb-c104-4dd2-b51d-3b8aa9aadf3b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.490206] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f530720e-fe3d-47e3-81f3-6fc72aa96de1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.519175] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820828, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487632} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.521960] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b7378019-a572-4d4d-a82d-cee13a1b6a88/b7378019-a572-4d4d-a82d-cee13a1b6a88.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1742.522251] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1742.522601] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad79c661-fcd4-4214-949e-29372009f424 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.529529] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1742.529529] env[63241]: value = "task-1820829" [ 1742.529529] env[63241]: _type = "Task" [ 1742.529529] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.539112] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820829, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.562658] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0440c0a8-f065-4a82-b190-33279e7c0d93] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1742.771927] env[63241]: INFO nova.compute.manager [-] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Took 1.47 seconds to deallocate network for instance. [ 1742.870700] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.871412] env[63241]: DEBUG nova.compute.manager [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1742.877959] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.972s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.878253] env[63241]: DEBUG nova.objects.instance [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lazy-loading 'resources' on Instance uuid f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1743.042729] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820829, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.222419} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.043026] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1743.044845] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98faf24-cac6-4dfa-9a53-ce04be5d6232 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.069375] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] b7378019-a572-4d4d-a82d-cee13a1b6a88/b7378019-a572-4d4d-a82d-cee13a1b6a88.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1743.070384] env[63241]: DEBUG nova.network.neutron [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updated VIF entry in instance network info cache for port 15a93d52-2bf4-4998-9189-94c3ba81a5ae. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1743.070826] env[63241]: DEBUG nova.network.neutron [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ceb9556a-7bff-425f-bfd2-a6dba68ba438", "address": "fa:16:3e:f1:59:43", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceb9556a-7b", "ovs_interfaceid": "ceb9556a-7bff-425f-bfd2-a6dba68ba438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "15a93d52-2bf4-4998-9189-94c3ba81a5ae", "address": "fa:16:3e:69:de:6c", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15a93d52-2b", "ovs_interfaceid": "15a93d52-2bf4-4998-9189-94c3ba81a5ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.073140] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: b4182e53-50db-4256-b376-b00100778935] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1743.082756] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c915a083-bf79-4c7b-a237-61a97323bff4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.104819] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1743.104819] env[63241]: value = "task-1820831" [ 1743.104819] env[63241]: _type = "Task" [ 1743.104819] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.117051] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820831, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.130057] env[63241]: DEBUG nova.compute.manager [req-1cea9292-ee0a-4ae4-be7e-ec7bee64ca94 req-03d5ae32-efec-4967-bdb9-3c527e40f170 service nova] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Received event network-vif-deleted-68ec05cb-7eaf-4904-b491-0f5d3bb27936 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1743.282847] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.383956] env[63241]: DEBUG nova.compute.utils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1743.390208] env[63241]: DEBUG nova.compute.manager [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1743.390481] env[63241]: DEBUG nova.network.neutron [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1743.449440] env[63241]: DEBUG nova.policy [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de6df2e8caaa4c0c82c94f9d107a8e17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6964b0dd75c4704b8f5cacd2c8e355f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1743.580669] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.580909] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances with incomplete migration {{(pid=63241) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1743.598910] env[63241]: DEBUG oslo_concurrency.lockutils [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] Releasing lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.599232] env[63241]: DEBUG nova.compute.manager [req-6f2f1465-1e57-4992-88fb-a1df6bbcdd8a req-64143bcd-7e4b-4c96-bea8-4eac99f424bc service nova] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Received event network-vif-deleted-aac5cae6-1124-4f0a-9270-ff1f4982fff4 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1743.618955] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820831, 'name': ReconfigVM_Task, 'duration_secs': 0.340796} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.622340] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Reconfigured VM instance instance-00000058 to attach disk [datastore1] b7378019-a572-4d4d-a82d-cee13a1b6a88/b7378019-a572-4d4d-a82d-cee13a1b6a88.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1743.623622] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-706388c5-12a9-43e5-a362-b94767864d03 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.630827] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1743.630827] env[63241]: value = "task-1820832" [ 1743.630827] env[63241]: _type = "Task" [ 1743.630827] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.644692] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820832, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.780522] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8258aa05-8ea4-471a-9ada-17dd59b0d247 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.791780] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ce12da-c880-47d2-9c0a-eee825293c65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.829479] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b118e46-c715-41a7-aa2a-edf7c2c9359e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.841225] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd067a60-4eb5-4be2-9266-b2487c361161 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.858873] env[63241]: DEBUG nova.compute.provider_tree [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1743.890499] env[63241]: DEBUG nova.compute.manager [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1743.989929] env[63241]: DEBUG nova.compute.manager [req-c61107da-de2f-4b68-a55a-1a2463310c70 req-3361de39-0e63-48a6-9aee-117f8c46f6f5 service nova] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Received event network-vif-plugged-ead55549-b686-405a-a2d0-8a995905158b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1743.989929] env[63241]: DEBUG oslo_concurrency.lockutils [req-c61107da-de2f-4b68-a55a-1a2463310c70 req-3361de39-0e63-48a6-9aee-117f8c46f6f5 service nova] Acquiring lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.990056] env[63241]: DEBUG oslo_concurrency.lockutils [req-c61107da-de2f-4b68-a55a-1a2463310c70 req-3361de39-0e63-48a6-9aee-117f8c46f6f5 service nova] Lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.990287] env[63241]: DEBUG oslo_concurrency.lockutils [req-c61107da-de2f-4b68-a55a-1a2463310c70 req-3361de39-0e63-48a6-9aee-117f8c46f6f5 service nova] Lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.990387] env[63241]: DEBUG nova.compute.manager [req-c61107da-de2f-4b68-a55a-1a2463310c70 req-3361de39-0e63-48a6-9aee-117f8c46f6f5 service nova] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] No waiting events found dispatching network-vif-plugged-ead55549-b686-405a-a2d0-8a995905158b {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1743.990564] env[63241]: WARNING nova.compute.manager [req-c61107da-de2f-4b68-a55a-1a2463310c70 req-3361de39-0e63-48a6-9aee-117f8c46f6f5 service nova] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Received unexpected event network-vif-plugged-ead55549-b686-405a-a2d0-8a995905158b for instance with vm_state building and task_state spawning. 
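Editor's note: the entries just above show the external-event dispatch pattern — the service receives network-vif-plugged-ead55549-..., takes the per-instance "<uuid>-events" lock, finds no registered waiter, and logs the "Received unexpected event" WARNING. The snippet below is a minimal, stdlib-only stand-in that mimics that registry-and-pop pattern; the class and method names are illustrative assumptions, not Nova's actual implementation.

    # Illustrative stand-in (not Nova's code): an "expected events" registry
    # explains the "No waiting events found dispatching ..." / WARNING pair above.
    # A waiter registers the event it expects; the handler pops a matching entry
    # under a per-instance lock and warns when nothing was registered.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
            self._events = {}               # {(instance_uuid, event_name): threading.Event}

        def prepare(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._events[(instance_uuid, event_name)] = waiter
            return waiter

        def pop_and_dispatch(self, instance_uuid, event_name):
            with self._lock:
                waiter = self._events.pop((instance_uuid, event_name), None)
            if waiter is None:
                print("WARNING: received unexpected event %s for instance %s"
                      % (event_name, instance_uuid))
                return False
            waiter.set()                    # wake whoever waits for the plug event
            return True

    events = InstanceEvents()
    # No waiter was registered yet, so this mirrors the WARNING logged above:
    events.pop_and_dispatch("6b4debb5-5a83-45f7-bcf2-36a10f95f644",
                            "network-vif-plugged-ead55549-b686-405a-a2d0-8a995905158b")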
[ 1744.086539] env[63241]: DEBUG nova.network.neutron [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Successfully created port: a6b9cb89-6a29-44b5-91b1-0591266c582b {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1744.087206] env[63241]: DEBUG nova.network.neutron [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Successfully updated port: ead55549-b686-405a-a2d0-8a995905158b {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1744.088344] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.143184] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820832, 'name': Rename_Task, 'duration_secs': 0.160369} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.143599] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1744.143907] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d3c6ad0-71d6-40bd-b99c-9561d8b74d68 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.150571] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1744.150571] env[63241]: value = "task-1820833" [ 1744.150571] env[63241]: _type = "Task" [ 1744.150571] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.158872] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820833, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.335356] env[63241]: DEBUG oslo_concurrency.lockutils [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-ceb9556a-7bff-425f-bfd2-a6dba68ba438" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.335356] env[63241]: DEBUG oslo_concurrency.lockutils [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-ceb9556a-7bff-425f-bfd2-a6dba68ba438" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.362525] env[63241]: DEBUG nova.scheduler.client.report [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1744.594761] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-6b4debb5-5a83-45f7-bcf2-36a10f95f644" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.594906] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-6b4debb5-5a83-45f7-bcf2-36a10f95f644" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.596151] env[63241]: DEBUG nova.network.neutron [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1744.660635] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820833, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.844114] env[63241]: DEBUG oslo_concurrency.lockutils [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.844114] env[63241]: DEBUG oslo_concurrency.lockutils [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.844114] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab307410-37f6-449e-8221-d5af99bd00c1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.863735] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654d5371-43cd-4a28-acf2-ea344278587d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.892009] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.014s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.899527] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Reconfiguring VM to detach interface {{(pid=63241) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1744.900145] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.634s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.901714] env[63241]: INFO nova.compute.claims [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1744.905084] env[63241]: DEBUG nova.compute.manager [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1744.906985] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5813b15c-19f4-4783-8e4b-b7fd3153a4eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.921370] env[63241]: INFO nova.scheduler.client.report [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted allocations for instance f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067 [ 1744.929280] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1744.929280] env[63241]: value = "task-1820834" [ 1744.929280] env[63241]: _type = "Task" [ 1744.929280] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.937939] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.945988] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1744.946295] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1744.946466] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1744.946654] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1744.946804] env[63241]: DEBUG nova.virt.hardware [None 
req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1744.946950] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1744.947177] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1744.947398] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1744.947543] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1744.947707] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1744.947882] env[63241]: DEBUG nova.virt.hardware [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1744.948970] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2829a4c-b3ea-4682-9169-b98f593503c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.957207] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2092fa8c-b616-41f1-aaed-88e492adb631 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.069020] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1745.069020] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377159', 'volume_id': '7f85c901-777e-4254-8502-a75d490b1a9e', 'name': 'volume-7f85c901-777e-4254-8502-a75d490b1a9e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '29b6caa8-a07c-494b-b776-b08affa45c87', 'attached_at': '', 'detached_at': '', 'volume_id': '7f85c901-777e-4254-8502-a75d490b1a9e', 'serial': '7f85c901-777e-4254-8502-a75d490b1a9e'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1745.069020] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df3649b-8687-4d04-9804-0d344185150a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.094018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4e2283-569e-4d74-9dd0-4d7346d4f8d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.124507] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] volume-7f85c901-777e-4254-8502-a75d490b1a9e/volume-7f85c901-777e-4254-8502-a75d490b1a9e.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1745.125149] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9712f00-7a61-4e43-9408-506164aaa8cc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.143941] env[63241]: DEBUG oslo_vmware.api [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1745.143941] env[63241]: value = "task-1820835" [ 1745.143941] env[63241]: _type = "Task" [ 1745.143941] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.152494] env[63241]: DEBUG oslo_vmware.api [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820835, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.162873] env[63241]: DEBUG oslo_vmware.api [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820833, 'name': PowerOnVM_Task, 'duration_secs': 0.540494} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.163328] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1745.163654] env[63241]: INFO nova.compute.manager [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Took 8.25 seconds to spawn the instance on the hypervisor. [ 1745.164803] env[63241]: DEBUG nova.compute.manager [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1745.164803] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e1a9bd-4fe4-4de7-82f7-40c1c95b8532 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.195463] env[63241]: DEBUG nova.network.neutron [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1745.439985] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92add6cf-806a-43f2-98d9-e988c9259d6b tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.867s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.446864] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.457708] env[63241]: DEBUG nova.network.neutron [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Updating instance_info_cache with network_info: [{"id": "ead55549-b686-405a-a2d0-8a995905158b", "address": "fa:16:3e:77:e8:9e", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapead55549-b6", "ovs_interfaceid": "ead55549-b686-405a-a2d0-8a995905158b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1745.654333] env[63241]: DEBUG oslo_vmware.api [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820835, 'name': ReconfigVM_Task, 'duration_secs': 0.415041} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.654642] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Reconfigured VM instance instance-00000047 to attach disk [datastore1] volume-7f85c901-777e-4254-8502-a75d490b1a9e/volume-7f85c901-777e-4254-8502-a75d490b1a9e.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1745.659475] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18654f40-ba27-4de4-821e-3b4fdc6aece3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.680060] env[63241]: DEBUG oslo_vmware.api [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1745.680060] env[63241]: value = "task-1820836" [ 1745.680060] env[63241]: _type = "Task" [ 1745.680060] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.683610] env[63241]: INFO nova.compute.manager [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Took 17.10 seconds to build instance. [ 1745.690766] env[63241]: DEBUG oslo_vmware.api [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820836, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.919685] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "037f539f-1bf1-4897-81b3-08c377b92211" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.920097] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.946470] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.961827] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-6b4debb5-5a83-45f7-bcf2-36a10f95f644" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.962186] env[63241]: DEBUG nova.compute.manager [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Instance network_info: |[{"id": "ead55549-b686-405a-a2d0-8a995905158b", "address": "fa:16:3e:77:e8:9e", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapead55549-b6", "ovs_interfaceid": "ead55549-b686-405a-a2d0-8a995905158b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1745.962603] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:e8:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dacd109c-2442-41b8-b612-7ed3efbdaa94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ead55549-b686-405a-a2d0-8a995905158b', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1745.971546] env[63241]: DEBUG oslo.service.loopingcall [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1745.975528] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1745.975958] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99c0e79e-b500-495f-b2ce-e0fd623a8388 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.005579] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1746.005579] env[63241]: value = "task-1820837" [ 1746.005579] env[63241]: _type = "Task" [ 1746.005579] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.016872] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.182304] env[63241]: DEBUG nova.compute.manager [req-037c4e60-e730-45a2-8274-c46427a72018 req-d8c7a519-0cc6-4aa2-a651-eaf6c5e0c933 service nova] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Received event network-changed-ead55549-b686-405a-a2d0-8a995905158b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1746.182304] env[63241]: DEBUG nova.compute.manager [req-037c4e60-e730-45a2-8274-c46427a72018 req-d8c7a519-0cc6-4aa2-a651-eaf6c5e0c933 service nova] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Refreshing instance network info cache due to event network-changed-ead55549-b686-405a-a2d0-8a995905158b. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1746.182304] env[63241]: DEBUG oslo_concurrency.lockutils [req-037c4e60-e730-45a2-8274-c46427a72018 req-d8c7a519-0cc6-4aa2-a651-eaf6c5e0c933 service nova] Acquiring lock "refresh_cache-6b4debb5-5a83-45f7-bcf2-36a10f95f644" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.182304] env[63241]: DEBUG oslo_concurrency.lockutils [req-037c4e60-e730-45a2-8274-c46427a72018 req-d8c7a519-0cc6-4aa2-a651-eaf6c5e0c933 service nova] Acquired lock "refresh_cache-6b4debb5-5a83-45f7-bcf2-36a10f95f644" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.182304] env[63241]: DEBUG nova.network.neutron [req-037c4e60-e730-45a2-8274-c46427a72018 req-d8c7a519-0cc6-4aa2-a651-eaf6c5e0c933 service nova] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Refreshing network info cache for port ead55549-b686-405a-a2d0-8a995905158b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1746.189306] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e2344cb4-adc8-4b18-b213-23412aa7a81a tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "b7378019-a572-4d4d-a82d-cee13a1b6a88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.621s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.199678] env[63241]: DEBUG oslo_vmware.api [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820836, 'name': ReconfigVM_Task, 'duration_secs': 0.163703} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.203095] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377159', 'volume_id': '7f85c901-777e-4254-8502-a75d490b1a9e', 'name': 'volume-7f85c901-777e-4254-8502-a75d490b1a9e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '29b6caa8-a07c-494b-b776-b08affa45c87', 'attached_at': '', 'detached_at': '', 'volume_id': '7f85c901-777e-4254-8502-a75d490b1a9e', 'serial': '7f85c901-777e-4254-8502-a75d490b1a9e'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1746.260633] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6c38cc-61e2-4176-ae25-38e8d25d1a7d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.268753] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a723e640-1a4e-4e72-aa40-80ef9d0b6952 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.299439] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c9486f-b93e-4e08-8ecd-0b748d5aa90a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.308455] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb39f627-85f6-48f6-8d30-f3b46d5d88ad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.322373] env[63241]: DEBUG nova.compute.provider_tree [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1746.423114] env[63241]: DEBUG nova.compute.manager [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1746.446915] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.515800] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.738774] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.738774] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.826596] env[63241]: DEBUG nova.scheduler.client.report [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1746.946375] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.947418] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.017110] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.240735] env[63241]: DEBUG nova.compute.manager [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1747.256295] env[63241]: DEBUG nova.objects.instance [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1747.272765] env[63241]: DEBUG nova.network.neutron [req-037c4e60-e730-45a2-8274-c46427a72018 req-d8c7a519-0cc6-4aa2-a651-eaf6c5e0c933 service nova] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Updated VIF entry in instance network info cache for port ead55549-b686-405a-a2d0-8a995905158b. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1747.273127] env[63241]: DEBUG nova.network.neutron [req-037c4e60-e730-45a2-8274-c46427a72018 req-d8c7a519-0cc6-4aa2-a651-eaf6c5e0c933 service nova] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Updating instance_info_cache with network_info: [{"id": "ead55549-b686-405a-a2d0-8a995905158b", "address": "fa:16:3e:77:e8:9e", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapead55549-b6", "ovs_interfaceid": "ead55549-b686-405a-a2d0-8a995905158b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.332211] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.332745] env[63241]: DEBUG nova.compute.manager [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1747.335442] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.619s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.337479] env[63241]: INFO nova.compute.claims [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1747.447503] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.517104] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.728493] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "29b6caa8-a07c-494b-b776-b08affa45c87" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.761421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9093df8-5410-4a29-9c68-7a37f30255f7 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.827s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.762361] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.034s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.762556] env[63241]: DEBUG nova.compute.manager [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1747.763440] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d637066-7547-49d8-9fa8-0cb68588dd94 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.768182] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.772280] env[63241]: DEBUG nova.compute.manager [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1747.772820] env[63241]: DEBUG nova.objects.instance [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1747.775732] env[63241]: DEBUG oslo_concurrency.lockutils [req-037c4e60-e730-45a2-8274-c46427a72018 req-d8c7a519-0cc6-4aa2-a651-eaf6c5e0c933 service nova] Releasing lock "refresh_cache-6b4debb5-5a83-45f7-bcf2-36a10f95f644" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.844580] env[63241]: DEBUG nova.compute.utils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1747.846657] env[63241]: DEBUG nova.compute.manager [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1747.846759] env[63241]: DEBUG nova.network.neutron [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1747.943792] env[63241]: DEBUG nova.policy [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac0c578d40af405b8fe206fcd309cf0a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6c76b46a4cf4a32a4a1c25fb81a963d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1747.950626] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.018952] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.279870] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1748.279870] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e8364be-0af3-4df1-bf07-138ed6e18215 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.287027] env[63241]: DEBUG oslo_vmware.api [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1748.287027] env[63241]: value = "task-1820838" [ 1748.287027] env[63241]: _type = "Task" [ 1748.287027] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.293895] env[63241]: DEBUG oslo_vmware.api [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820838, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.351648] env[63241]: DEBUG nova.compute.manager [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1748.450946] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.519975] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.539437] env[63241]: DEBUG nova.network.neutron [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Successfully created port: a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1748.685963] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf901d55-35b4-4287-b82e-e0c7196ac864 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.694300] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47a2606-9485-47e6-b6a7-fa011feb9de9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.727584] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7dac17-e3ed-4748-ab5e-af640360081e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.735412] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5e5005-9779-4377-82fc-d11662870bcb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.748415] env[63241]: DEBUG nova.compute.provider_tree [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1748.796667] env[63241]: DEBUG oslo_vmware.api [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820838, 'name': PowerOffVM_Task, 'duration_secs': 0.200754} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.796951] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1748.797158] env[63241]: DEBUG nova.compute.manager [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1748.798023] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05397963-7a01-4a09-afc7-e0eea9a1cd2c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.952260] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.018813] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.251743] env[63241]: DEBUG nova.scheduler.client.report [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1749.312472] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bfd868cb-bcb4-4981-8e87-6a3181241daa tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.550s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.364929] env[63241]: DEBUG nova.compute.manager [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1749.392357] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1749.392630] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1749.392794] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1749.392979] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1749.393982] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1749.393982] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1749.393982] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1749.393982] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1749.394215] 
env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1749.394215] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1749.394498] env[63241]: DEBUG nova.virt.hardware [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1749.396988] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b08a955-ef9e-4964-b3be-78f23f525367 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.409689] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4db60e2-fddf-456a-96a9-47cd5fd24517 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.452027] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.520782] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.757144] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.757790] env[63241]: DEBUG nova.compute.manager [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1749.760825] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.448s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.761150] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.763280] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.542s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.788745] env[63241]: INFO nova.scheduler.client.report [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Deleted allocations for instance 7f1710d0-857d-41fc-8151-8c5e129dda08 [ 1749.953305] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.023315] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.088917] env[63241]: DEBUG nova.compute.manager [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1750.089868] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0314f6-d878-41c3-a4ef-27251b2004e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.269022] env[63241]: DEBUG nova.compute.utils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1750.272103] env[63241]: INFO nova.compute.claims [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1750.278462] env[63241]: DEBUG nova.compute.manager [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1750.281016] env[63241]: DEBUG nova.network.neutron [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1750.299319] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2731f28c-9790-41f8-8692-d9cb0860ee85 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "7f1710d0-857d-41fc-8151-8c5e129dda08" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.050s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.348725] env[63241]: DEBUG nova.policy [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c00391826fd242709ad7947610554fc2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '855da29218ba4391a208e2835f60ee11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1750.454692] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress 
is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.525989] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.603109] env[63241]: INFO nova.compute.manager [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] instance snapshotting [ 1750.608572] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323e3b36-54a7-4830-a050-8f0f085c8601 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.616400] env[63241]: DEBUG nova.network.neutron [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Successfully created port: 6be29b8b-a0d6-4346-b774-5faf878f177c {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1750.633335] env[63241]: DEBUG nova.objects.instance [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1750.638816] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacc155b-9275-43c7-8222-135486ce1812 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.778812] env[63241]: DEBUG nova.compute.manager [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1750.782746] env[63241]: INFO nova.compute.resource_tracker [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating resource usage from migration 4888f213-32fb-497d-9453-837f8f1b279f [ 1750.958976] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.022287] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.118018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd742551-42ce-4d9b-94f8-3eb042cb185c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.124662] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b50ebf3-3823-4a55-9eb4-9def6dafb28c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.158500] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.158975] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.159318] env[63241]: DEBUG nova.network.neutron [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1751.159629] env[63241]: DEBUG nova.objects.instance [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'info_cache' on Instance uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1751.161944] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1751.166139] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-597d15f2-e322-469b-b66f-870692979ae8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.166139] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f519f6a-42e7-4821-a7ac-8ebe7ca332f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.174745] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676fa331-e60a-477c-b807-e3f12d5f8d32 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.178798] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: 
(returnval){ [ 1751.178798] env[63241]: value = "task-1820839" [ 1751.178798] env[63241]: _type = "Task" [ 1751.178798] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.192882] env[63241]: DEBUG nova.compute.provider_tree [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1751.200321] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820839, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.458808] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.523059] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.551007] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.551186] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.551373] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.551497] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.551670] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 
tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.553962] env[63241]: INFO nova.compute.manager [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Terminating instance [ 1751.556054] env[63241]: DEBUG nova.compute.manager [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1751.556296] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1751.557373] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b288d2-b101-4906-981c-c1b5552a5b1c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.565122] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1751.565383] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-068d7f22-c666-4a8a-932e-e65f9fcbb5ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.572252] env[63241]: DEBUG oslo_vmware.api [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1751.572252] env[63241]: value = "task-1820840" [ 1751.572252] env[63241]: _type = "Task" [ 1751.572252] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.581936] env[63241]: DEBUG oslo_vmware.api [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820840, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.663298] env[63241]: DEBUG nova.objects.base [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Object Instance<29b6caa8-a07c-494b-b776-b08affa45c87> lazy-loaded attributes: flavor,info_cache {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1751.690715] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820839, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.695828] env[63241]: DEBUG nova.scheduler.client.report [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1751.794731] env[63241]: DEBUG nova.compute.manager [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1751.822775] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1751.823098] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1751.823265] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1751.823484] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1751.823655] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1751.823812] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1751.824263] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1751.824557] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1751.824815] env[63241]: DEBUG nova.virt.hardware [None 
req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1751.825306] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1751.825306] env[63241]: DEBUG nova.virt.hardware [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1751.826344] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93725f97-907e-48f8-8d28-076cf3a58281 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.836407] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf02a9a4-d7d0-425f-ac31-3282e90b18fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.956318] env[63241]: DEBUG oslo_vmware.api [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820834, 'name': ReconfigVM_Task, 'duration_secs': 6.969482} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.956616] env[63241]: DEBUG oslo_concurrency.lockutils [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.956810] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Reconfigured VM to detach interface {{(pid=63241) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1752.023367] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820837, 'name': CreateVM_Task, 'duration_secs': 5.979837} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.023544] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1752.024221] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.024390] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.024705] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1752.024993] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88050436-22ec-42e7-9c04-da2352ed5a28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.029710] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1752.029710] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5233b16a-a88d-c237-10a3-2cf089e6c9ba" [ 1752.029710] env[63241]: _type = "Task" [ 1752.029710] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.037906] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5233b16a-a88d-c237-10a3-2cf089e6c9ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.083486] env[63241]: DEBUG oslo_vmware.api [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820840, 'name': PowerOffVM_Task, 'duration_secs': 0.233489} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.083790] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1752.083954] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1752.086074] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd2c3fd1-4e04-4806-b813-3709bb85c5de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.192234] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820839, 'name': CreateSnapshot_Task, 'duration_secs': 0.955191} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.192499] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1752.193266] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf98396c-46a5-46b7-bfc5-939c3e5563cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.203479] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.440s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.203738] env[63241]: INFO nova.compute.manager [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Migrating [ 1752.218290] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.173s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.218511] env[63241]: DEBUG nova.objects.instance [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lazy-loading 'resources' on Instance uuid 
f65e5b00-38b5-4453-b370-1f56f18053eb {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1752.337895] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1752.337895] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1752.338344] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Deleting the datastore file [datastore1] 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1752.338749] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4bcf97b-9e76-4204-95a0-0bac9ce94670 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.345194] env[63241]: DEBUG oslo_vmware.api [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for the task: (returnval){ [ 1752.345194] env[63241]: value = "task-1820842" [ 1752.345194] env[63241]: _type = "Task" [ 1752.345194] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.354012] env[63241]: DEBUG oslo_vmware.api [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820842, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.545026] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5233b16a-a88d-c237-10a3-2cf089e6c9ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010829} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.545026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.545026] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1752.545026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.545026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.545026] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1752.545721] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcd32835-5281-4b59-bcf8-cd6024f3d27e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.564811] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1752.565024] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1752.565808] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3ebbf82-0000-4793-9c34-d7b6e162372a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.571292] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1752.571292] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c5e90a-0204-cecc-d53b-b836c2c5ddc7" [ 1752.571292] env[63241]: _type = "Task" [ 1752.571292] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.584306] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c5e90a-0204-cecc-d53b-b836c2c5ddc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.682549] env[63241]: DEBUG nova.compute.manager [req-2aa3c1b1-e846-4571-88f0-c0d2d04428e9 req-93245159-e5fc-428d-8b63-0116a1ec6834 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-vif-deleted-ceb9556a-7bff-425f-bfd2-a6dba68ba438 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1752.682895] env[63241]: INFO nova.compute.manager [req-2aa3c1b1-e846-4571-88f0-c0d2d04428e9 req-93245159-e5fc-428d-8b63-0116a1ec6834 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Neutron deleted interface ceb9556a-7bff-425f-bfd2-a6dba68ba438; detaching it from the instance and deleting it from the info cache [ 1752.683346] env[63241]: DEBUG nova.network.neutron [req-2aa3c1b1-e846-4571-88f0-c0d2d04428e9 req-93245159-e5fc-428d-8b63-0116a1ec6834 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"15a93d52-2bf4-4998-9189-94c3ba81a5ae", "address": "fa:16:3e:69:de:6c", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15a93d52-2b", "ovs_interfaceid": "15a93d52-2bf4-4998-9189-94c3ba81a5ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.735870] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1752.736520] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.737731] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.737731] env[63241]: DEBUG nova.network.neutron [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1752.738511] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1082aaea-d577-4bb4-aa1e-8e7b9fe15be8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.758744] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1752.758744] env[63241]: value = "task-1820843" [ 1752.758744] env[63241]: _type = "Task" [ 1752.758744] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.770069] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820843, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.798168] env[63241]: DEBUG nova.network.neutron [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Updating instance_info_cache with network_info: [{"id": "7b46f450-f9df-492c-bc52-8760f14afb90", "address": "fa:16:3e:e6:47:b4", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b46f450-f9", "ovs_interfaceid": "7b46f450-f9df-492c-bc52-8760f14afb90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.857224] env[63241]: DEBUG oslo_vmware.api [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Task: {'id': task-1820842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193882} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.860022] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1752.860243] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1752.860446] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1752.860622] env[63241]: INFO nova.compute.manager [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1752.860872] env[63241]: DEBUG oslo.service.loopingcall [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1752.861294] env[63241]: DEBUG nova.compute.manager [-] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1752.861403] env[63241]: DEBUG nova.network.neutron [-] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1752.939981] env[63241]: DEBUG nova.network.neutron [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Successfully updated port: a6b9cb89-6a29-44b5-91b1-0591266c582b {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1752.989398] env[63241]: DEBUG nova.compute.manager [req-819351ac-0507-44c7-b5f9-ecd0cb9be4ab req-0647f197-a88a-4f58-a33c-0420f614ba8e service nova] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Received event network-vif-plugged-a6b9cb89-6a29-44b5-91b1-0591266c582b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1752.989684] env[63241]: DEBUG oslo_concurrency.lockutils [req-819351ac-0507-44c7-b5f9-ecd0cb9be4ab req-0647f197-a88a-4f58-a33c-0420f614ba8e service nova] Acquiring lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.989955] env[63241]: DEBUG oslo_concurrency.lockutils [req-819351ac-0507-44c7-b5f9-ecd0cb9be4ab 
req-0647f197-a88a-4f58-a33c-0420f614ba8e service nova] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.990307] env[63241]: DEBUG oslo_concurrency.lockutils [req-819351ac-0507-44c7-b5f9-ecd0cb9be4ab req-0647f197-a88a-4f58-a33c-0420f614ba8e service nova] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.990307] env[63241]: DEBUG nova.compute.manager [req-819351ac-0507-44c7-b5f9-ecd0cb9be4ab req-0647f197-a88a-4f58-a33c-0420f614ba8e service nova] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] No waiting events found dispatching network-vif-plugged-a6b9cb89-6a29-44b5-91b1-0591266c582b {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1752.990588] env[63241]: WARNING nova.compute.manager [req-819351ac-0507-44c7-b5f9-ecd0cb9be4ab req-0647f197-a88a-4f58-a33c-0420f614ba8e service nova] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Received unexpected event network-vif-plugged-a6b9cb89-6a29-44b5-91b1-0591266c582b for instance with vm_state building and task_state spawning. [ 1753.037765] env[63241]: DEBUG nova.network.neutron [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Successfully updated port: 6be29b8b-a0d6-4346-b774-5faf878f177c {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1753.084801] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c5e90a-0204-cecc-d53b-b836c2c5ddc7, 'name': SearchDatastore_Task, 'duration_secs': 0.013937} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.085638] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65ce3977-2995-4adf-84fa-3060ef28dcbe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.095023] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1753.095023] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52227eda-6058-8a8e-a154-ca1f6c1b6cd4" [ 1753.095023] env[63241]: _type = "Task" [ 1753.095023] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.100312] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52227eda-6058-8a8e-a154-ca1f6c1b6cd4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.186540] env[63241]: DEBUG oslo_concurrency.lockutils [req-2aa3c1b1-e846-4571-88f0-c0d2d04428e9 req-93245159-e5fc-428d-8b63-0116a1ec6834 service nova] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.186738] env[63241]: DEBUG oslo_concurrency.lockutils [req-2aa3c1b1-e846-4571-88f0-c0d2d04428e9 req-93245159-e5fc-428d-8b63-0116a1ec6834 service nova] Acquired lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.188036] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3bf402-40c1-4489-a0b8-2f13a36dedcd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.206062] env[63241]: DEBUG oslo_concurrency.lockutils [req-2aa3c1b1-e846-4571-88f0-c0d2d04428e9 req-93245159-e5fc-428d-8b63-0116a1ec6834 service nova] Releasing lock "9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.206420] env[63241]: WARNING nova.compute.manager [req-2aa3c1b1-e846-4571-88f0-c0d2d04428e9 req-93245159-e5fc-428d-8b63-0116a1ec6834 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Detach interface failed, port_id=ceb9556a-7bff-425f-bfd2-a6dba68ba438, reason: No device with interface-id ceb9556a-7bff-425f-bfd2-a6dba68ba438 exists on VM: nova.exception.NotFound: No device with interface-id ceb9556a-7bff-425f-bfd2-a6dba68ba438 exists on VM [ 1753.208463] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd00ab7-45d2-4cd0-93e9-c4720c346344 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.215767] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7602cb-780f-4c95-a005-61fe6e4c4b57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.248058] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402ad494-e098-43fd-901b-9a0d9e980c07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.258019] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b886e8e-5cb7-4cdc-ae93-1126832853bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.274851] env[63241]: DEBUG nova.compute.provider_tree [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1753.279619] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820843, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.300478] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Releasing lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.445884] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "refresh_cache-b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.446088] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "refresh_cache-b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.446220] env[63241]: DEBUG nova.network.neutron [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1753.461926] env[63241]: DEBUG oslo_concurrency.lockutils [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.462118] env[63241]: DEBUG oslo_concurrency.lockutils [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.462295] env[63241]: DEBUG nova.network.neutron [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1753.484935] env[63241]: DEBUG nova.network.neutron [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance_info_cache with network_info: [{"id": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "address": "fa:16:3e:98:e9:3c", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5471bd-3b", "ovs_interfaceid": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.539179] env[63241]: DEBUG nova.network.neutron [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Successfully updated port: a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1753.542993] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.542993] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.542993] env[63241]: DEBUG nova.network.neutron [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1753.604216] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52227eda-6058-8a8e-a154-ca1f6c1b6cd4, 'name': SearchDatastore_Task, 'duration_secs': 0.025034} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.604529] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.604800] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 6b4debb5-5a83-45f7-bcf2-36a10f95f644/6b4debb5-5a83-45f7-bcf2-36a10f95f644.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1753.605075] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22bd10d9-9673-461e-8b52-06de5e593bc3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.612545] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1753.612545] env[63241]: value = "task-1820844" [ 1753.612545] env[63241]: _type = "Task" [ 1753.612545] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.621782] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.772382] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820843, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.781610] env[63241]: DEBUG nova.scheduler.client.report [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1753.805199] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1753.806053] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e031d5d-78e1-4911-bb7c-480df53b9c65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.813981] env[63241]: DEBUG oslo_vmware.api [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1753.813981] env[63241]: value = "task-1820845" [ 1753.813981] env[63241]: _type = "Task" [ 1753.813981] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.823968] env[63241]: DEBUG oslo_vmware.api [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820845, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.987414] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.023394] env[63241]: DEBUG nova.network.neutron [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1754.043191] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "refresh_cache-e62f49f0-370d-4b5d-ab43-72e0e6238432" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.043370] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "refresh_cache-e62f49f0-370d-4b5d-ab43-72e0e6238432" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.043590] env[63241]: DEBUG nova.network.neutron [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1754.077389] env[63241]: DEBUG nova.network.neutron [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1754.125014] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4969} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.128860] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 6b4debb5-5a83-45f7-bcf2-36a10f95f644/6b4debb5-5a83-45f7-bcf2-36a10f95f644.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1754.129150] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1754.129783] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b49ecbba-56ec-4c6e-bdca-28ec27f0014a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.137966] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1754.137966] env[63241]: value = "task-1820846" [ 1754.137966] env[63241]: _type = "Task" [ 1754.137966] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.146495] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820846, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.189425] env[63241]: DEBUG nova.network.neutron [-] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.219876] env[63241]: DEBUG nova.network.neutron [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.275142] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820843, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.287338] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.069s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.289858] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.959s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.290457] env[63241]: DEBUG nova.objects.instance [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lazy-loading 'resources' on Instance uuid e3842404-2c80-4fa9-b0c9-c58c484845a2 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.313110] env[63241]: INFO nova.scheduler.client.report [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Deleted allocations for instance f65e5b00-38b5-4453-b370-1f56f18053eb [ 1754.330033] env[63241]: DEBUG oslo_vmware.api [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820845, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.430314] env[63241]: DEBUG nova.network.neutron [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Updating instance_info_cache with network_info: [{"id": "a6b9cb89-6a29-44b5-91b1-0591266c582b", "address": "fa:16:3e:ec:74:a0", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6b9cb89-6a", "ovs_interfaceid": "a6b9cb89-6a29-44b5-91b1-0591266c582b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.564577] env[63241]: INFO nova.network.neutron [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Port 15a93d52-2bf4-4998-9189-94c3ba81a5ae from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1754.565048] env[63241]: DEBUG nova.network.neutron [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [{"id": "10657b5b-6750-4389-b802-7e6bee8963e7", "address": "fa:16:3e:ea:10:72", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10657b5b-67", "ovs_interfaceid": "10657b5b-6750-4389-b802-7e6bee8963e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.627351] env[63241]: DEBUG nova.network.neutron [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1754.648829] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820846, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.489603} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.649129] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1754.649916] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35454a6-b647-43c0-abc8-2225a1162644 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.674644] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 6b4debb5-5a83-45f7-bcf2-36a10f95f644/6b4debb5-5a83-45f7-bcf2-36a10f95f644.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1754.677241] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-466327cc-d34f-4c9e-9d6a-3d2412b0616d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.695255] env[63241]: INFO nova.compute.manager [-] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Took 1.83 seconds to deallocate network for instance. [ 1754.705322] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1754.705322] env[63241]: value = "task-1820847" [ 1754.705322] env[63241]: _type = "Task" [ 1754.705322] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.715773] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820847, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.723190] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.723498] env[63241]: DEBUG nova.compute.manager [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Instance network_info: |[{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1754.723985] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:60:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6be29b8b-a0d6-4346-b774-5faf878f177c', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1754.732678] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Creating folder: Project (855da29218ba4391a208e2835f60ee11). Parent ref: group-v376927. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1754.733905] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13ac015d-c470-422b-b4fc-e16687166efa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.746702] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Created folder: Project (855da29218ba4391a208e2835f60ee11) in parent group-v376927. [ 1754.746928] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Creating folder: Instances. Parent ref: group-v377163. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1754.747166] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46751f96-b60a-43a0-8a87-4327e95abdad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.757084] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Created folder: Instances in parent group-v377163. [ 1754.757407] env[63241]: DEBUG oslo.service.loopingcall [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1754.757660] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1754.757915] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abc01e6a-8281-4803-9f86-cd5a751bba30 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.784576] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820843, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.788972] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1754.788972] env[63241]: value = "task-1820850" [ 1754.788972] env[63241]: _type = "Task" [ 1754.788972] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.800602] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820850, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.829304] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6370d850-f7bd-4c01-80ee-d3cda39e7c78 tempest-ServerRescueTestJSONUnderV235-255082110 tempest-ServerRescueTestJSONUnderV235-255082110-project-member] Lock "f65e5b00-38b5-4453-b370-1f56f18053eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.828s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.843476] env[63241]: DEBUG oslo_vmware.api [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820845, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.850523] env[63241]: DEBUG oslo_concurrency.lockutils [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.851042] env[63241]: DEBUG oslo_concurrency.lockutils [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "9d301157-6870-4452-9ae6-0d45c4338886" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.851340] env[63241]: DEBUG oslo_concurrency.lockutils [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "9d301157-6870-4452-9ae6-0d45c4338886-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.851340] env[63241]: DEBUG oslo_concurrency.lockutils [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "9d301157-6870-4452-9ae6-0d45c4338886-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.851532] env[63241]: DEBUG oslo_concurrency.lockutils [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "9d301157-6870-4452-9ae6-0d45c4338886-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.853674] env[63241]: INFO nova.compute.manager [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Terminating instance [ 
1754.855768] env[63241]: DEBUG nova.compute.manager [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1754.855958] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1754.863261] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01592f7-4613-455e-8420-11e54e4b645d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.869319] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1754.869580] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cca3a26-ab70-411a-8a2e-5b76ff6f90ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.877408] env[63241]: DEBUG oslo_vmware.api [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1754.877408] env[63241]: value = "task-1820851" [ 1754.877408] env[63241]: _type = "Task" [ 1754.877408] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.886171] env[63241]: DEBUG oslo_vmware.api [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820851, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.933217] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "refresh_cache-b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.933535] env[63241]: DEBUG nova.compute.manager [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Instance network_info: |[{"id": "a6b9cb89-6a29-44b5-91b1-0591266c582b", "address": "fa:16:3e:ec:74:a0", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6b9cb89-6a", "ovs_interfaceid": "a6b9cb89-6a29-44b5-91b1-0591266c582b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1754.934009] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:74:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6b9cb89-6a29-44b5-91b1-0591266c582b', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1754.942791] env[63241]: DEBUG oslo.service.loopingcall [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1754.947260] env[63241]: DEBUG nova.network.neutron [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Updating instance_info_cache with network_info: [{"id": "a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec", "address": "fa:16:3e:fe:33:68", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4ff42d0-66", "ovs_interfaceid": "a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.950191] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1754.950631] env[63241]: DEBUG nova.compute.manager [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Received event network-vif-plugged-a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1754.950829] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] Acquiring lock "e62f49f0-370d-4b5d-ab43-72e0e6238432-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.951035] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.951203] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.951367] env[63241]: DEBUG nova.compute.manager 
[req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] No waiting events found dispatching network-vif-plugged-a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1754.951529] env[63241]: WARNING nova.compute.manager [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Received unexpected event network-vif-plugged-a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec for instance with vm_state building and task_state spawning. [ 1754.951691] env[63241]: DEBUG nova.compute.manager [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Received event network-changed-a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1754.951842] env[63241]: DEBUG nova.compute.manager [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Refreshing instance network info cache due to event network-changed-a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1754.952010] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] Acquiring lock "refresh_cache-e62f49f0-370d-4b5d-ab43-72e0e6238432" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.953405] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c297c46a-c9cf-478e-86fc-748a4c5aa409 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.979941] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1754.979941] env[63241]: value = "task-1820852" [ 1754.979941] env[63241]: _type = "Task" [ 1754.979941] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.992069] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820852, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.067738] env[63241]: DEBUG oslo_concurrency.lockutils [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-9d301157-6870-4452-9ae6-0d45c4338886" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.153675] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca748942-a11d-4811-bd5b-7705a953bf4b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.165911] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187e15f8-8ba5-466b-8a4e-b9a4423db3ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.201541] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae36ac12-82c7-4b71-b8b0-024d64493b63 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.205523] env[63241]: DEBUG nova.compute.manager [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Received event network-vif-plugged-6be29b8b-a0d6-4346-b774-5faf878f177c {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1755.205767] env[63241]: DEBUG oslo_concurrency.lockutils [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] Acquiring lock "e28ba013-0bc5-4edc-858d-674980bc8742-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.205984] env[63241]: DEBUG oslo_concurrency.lockutils [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] Lock "e28ba013-0bc5-4edc-858d-674980bc8742-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.206194] env[63241]: DEBUG oslo_concurrency.lockutils [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] Lock "e28ba013-0bc5-4edc-858d-674980bc8742-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.206385] env[63241]: DEBUG nova.compute.manager [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] No waiting events found dispatching network-vif-plugged-6be29b8b-a0d6-4346-b774-5faf878f177c {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1755.206601] env[63241]: WARNING nova.compute.manager [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Received unexpected event network-vif-plugged-6be29b8b-a0d6-4346-b774-5faf878f177c for instance 
with vm_state building and task_state spawning. [ 1755.206793] env[63241]: DEBUG nova.compute.manager [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Received event network-changed-a6b9cb89-6a29-44b5-91b1-0591266c582b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1755.206953] env[63241]: DEBUG nova.compute.manager [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Refreshing instance network info cache due to event network-changed-a6b9cb89-6a29-44b5-91b1-0591266c582b. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1755.207182] env[63241]: DEBUG oslo_concurrency.lockutils [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] Acquiring lock "refresh_cache-b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.207720] env[63241]: DEBUG oslo_concurrency.lockutils [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] Acquired lock "refresh_cache-b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.207720] env[63241]: DEBUG nova.network.neutron [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Refreshing network info cache for port a6b9cb89-6a29-44b5-91b1-0591266c582b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1755.210049] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.223213] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4057d820-a190-403c-a0c3-2ab187216719 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.227676] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820847, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.240365] env[63241]: DEBUG nova.compute.provider_tree [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1755.284925] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820843, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.297831] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820850, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.327861] env[63241]: DEBUG oslo_vmware.api [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820845, 'name': PowerOnVM_Task, 'duration_secs': 1.118934} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.328264] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1755.328526] env[63241]: DEBUG nova.compute.manager [None req-8d752dcb-799c-4811-9bfa-556a3210344e tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1755.329385] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1704ea47-5780-4a97-903b-011e8aef97d9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.387737] env[63241]: DEBUG oslo_vmware.api [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820851, 'name': PowerOffVM_Task, 'duration_secs': 0.348578} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.388646] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1755.388816] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1755.389072] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bc41b18d-6070-4a0e-8b63-13f66be88b3f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.450485] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "refresh_cache-e62f49f0-370d-4b5d-ab43-72e0e6238432" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.451075] env[63241]: DEBUG nova.compute.manager [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Instance network_info: |[{"id": "a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec", "address": "fa:16:3e:fe:33:68", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4ff42d0-66", "ovs_interfaceid": "a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1755.451518] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] Acquired lock "refresh_cache-e62f49f0-370d-4b5d-ab43-72e0e6238432" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.451820] env[63241]: DEBUG nova.network.neutron [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] 
Refreshing network info cache for port a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1755.453280] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:33:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1755.465714] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Creating folder: Project (f6c76b46a4cf4a32a4a1c25fb81a963d). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1755.467286] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48477199-d70a-4dcb-8f72-a4bcd9946db8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.479811] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Created folder: Project (f6c76b46a4cf4a32a4a1c25fb81a963d) in parent group-v376927. [ 1755.480042] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Creating folder: Instances. Parent ref: group-v377167. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1755.480339] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-901ab801-ce40-4d8b-a399-4f6846111fd1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.491396] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820852, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.494390] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Created folder: Instances in parent group-v377167. [ 1755.494665] env[63241]: DEBUG oslo.service.loopingcall [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1755.494881] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1755.495163] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a65cfe5-e48f-41b5-bf29-1e366f549605 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.513925] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcba79f-efad-4e9d-a415-c3c7c3f45e6c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.517998] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1755.517998] env[63241]: value = "task-1820856" [ 1755.517998] env[63241]: _type = "Task" [ 1755.517998] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.535985] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance '864175e0-33f0-429f-bdf6-722d9b00da2b' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1755.545210] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820856, 'name': CreateVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.574605] env[63241]: DEBUG oslo_concurrency.lockutils [None req-24775178-fcfa-41f6-92fb-00e937ad8a5f tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-9d301157-6870-4452-9ae6-0d45c4338886-ceb9556a-7bff-425f-bfd2-a6dba68ba438" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.239s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.667674] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1755.668120] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1755.668299] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleting the datastore file [datastore1] 9d301157-6870-4452-9ae6-0d45c4338886 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1755.668456] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3924f96a-b4c1-4804-a9b3-268aca4c025c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.674731] env[63241]: DEBUG oslo_vmware.api [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1755.674731] env[63241]: value = "task-1820857" [ 1755.674731] env[63241]: _type = "Task" [ 1755.674731] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.682783] env[63241]: DEBUG oslo_vmware.api [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.722466] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820847, 'name': ReconfigVM_Task, 'duration_secs': 0.627503} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.722754] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 6b4debb5-5a83-45f7-bcf2-36a10f95f644/6b4debb5-5a83-45f7-bcf2-36a10f95f644.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1755.723418] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af3e02b6-84a2-4d0c-bb81-1f76bafa6e1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.730715] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1755.730715] env[63241]: value = "task-1820858" [ 1755.730715] env[63241]: _type = "Task" [ 1755.730715] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.739679] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820858, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.743011] env[63241]: DEBUG nova.scheduler.client.report [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1755.789714] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820843, 'name': CloneVM_Task, 'duration_secs': 2.677498} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.790311] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Created linked-clone VM from snapshot [ 1755.796018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a4f46e-49a3-4ea8-b9b1-327f25b1374f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.810765] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820850, 'name': CreateVM_Task, 'duration_secs': 0.56152} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.819801] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1755.819801] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Uploading image d1859f1b-bae7-4679-bf8c-0138bfa1f249 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1755.823523] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.823523] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.824197] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1755.828854] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-531823f7-e8d2-4b15-a0fc-da7b3f0a39ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.832200] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1755.832200] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a215eb-a075-2623-eb63-c48bb612dc29" [ 1755.832200] env[63241]: _type = "Task" [ 1755.832200] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.840570] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1755.840570] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c8751fb4-0c8c-490b-a369-9694abed7f91 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.854280] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a215eb-a075-2623-eb63-c48bb612dc29, 'name': SearchDatastore_Task, 'duration_secs': 0.009977} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.854280] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.854280] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1755.854280] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.854280] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.854280] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1755.854280] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e71469b-27a5-45d9-b405-f2e20ab06675 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.863139] env[63241]: DEBUG oslo_vmware.api [None 
req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1755.863139] env[63241]: value = "task-1820859" [ 1755.863139] env[63241]: _type = "Task" [ 1755.863139] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.875262] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820859, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.881840] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1755.882181] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1755.883060] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c9ad21f-b720-4bf2-8b5e-f07c867ad217 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.893455] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1755.893455] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52779588-629b-f4f5-cd73-757f59fdeda0" [ 1755.893455] env[63241]: _type = "Task" [ 1755.893455] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.903975] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52779588-629b-f4f5-cd73-757f59fdeda0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.990537] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820852, 'name': CreateVM_Task, 'duration_secs': 0.64158} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.990865] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1755.991400] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.991572] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.991893] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1755.992167] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf3e999d-5b84-4ce2-b33f-1fdea7df763f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.997102] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1755.997102] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52fa82d2-a393-0275-58c1-5c2df52287c3" [ 1755.997102] env[63241]: _type = "Task" [ 1755.997102] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.005318] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fa82d2-a393-0275-58c1-5c2df52287c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.027993] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820856, 'name': CreateVM_Task, 'duration_secs': 0.44688} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.028261] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1756.028917] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.041938] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1756.042245] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a4a7de6-5624-4e0a-b1de-67a23b3f1ec1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.050325] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1756.050325] env[63241]: value = "task-1820860" [ 1756.050325] env[63241]: _type = "Task" [ 1756.050325] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.060566] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820860, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.186391] env[63241]: DEBUG oslo_vmware.api [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222163} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.186735] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1756.186955] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1756.187155] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1756.187400] env[63241]: INFO nova.compute.manager [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1756.187573] env[63241]: DEBUG oslo.service.loopingcall [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1756.187785] env[63241]: DEBUG nova.compute.manager [-] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1756.187886] env[63241]: DEBUG nova.network.neutron [-] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1756.238704] env[63241]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 15a93d52-2bf4-4998-9189-94c3ba81a5ae could not be found.", "detail": ""}} {{(pid=63241) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1756.238975] env[63241]: DEBUG nova.network.neutron [-] Unable to show port 15a93d52-2bf4-4998-9189-94c3ba81a5ae as it no longer exists. {{(pid=63241) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1756.246928] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820858, 'name': Rename_Task, 'duration_secs': 0.184456} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.247681] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.958s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.249552] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1756.251294] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.969s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.251515] env[63241]: DEBUG nova.objects.instance [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lazy-loading 'resources' on Instance uuid f372d405-f7d5-4e5f-8c36-fe9651af2a0d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.254919] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f846d961-d9fa-4460-bfed-22cb5cf7d5eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.262875] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1756.262875] env[63241]: value = "task-1820861" [ 1756.262875] env[63241]: _type = "Task" [ 1756.262875] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.276920] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820861, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.278053] env[63241]: INFO nova.scheduler.client.report [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Deleted allocations for instance e3842404-2c80-4fa9-b0c9-c58c484845a2 [ 1756.371414] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820859, 'name': Destroy_Task, 'duration_secs': 0.483708} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.372318] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Destroyed the VM [ 1756.372772] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1756.372874] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a01e1c89-688e-4446-8ccb-bf8f0a83d58e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.380296] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1756.380296] env[63241]: value = "task-1820862" [ 1756.380296] env[63241]: _type = "Task" [ 1756.380296] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.389283] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820862, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.402702] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52779588-629b-f4f5-cd73-757f59fdeda0, 'name': SearchDatastore_Task, 'duration_secs': 0.012484} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.403540] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3154544-2b78-470f-8874-de4b34b99bd7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.408921] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1756.408921] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f3d489-5345-2c68-d61c-6cf9885ae967" [ 1756.408921] env[63241]: _type = "Task" [ 1756.408921] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.418982] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f3d489-5345-2c68-d61c-6cf9885ae967, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.452795] env[63241]: DEBUG nova.network.neutron [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Updated VIF entry in instance network info cache for port a6b9cb89-6a29-44b5-91b1-0591266c582b. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1756.459166] env[63241]: DEBUG nova.network.neutron [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Updating instance_info_cache with network_info: [{"id": "a6b9cb89-6a29-44b5-91b1-0591266c582b", "address": "fa:16:3e:ec:74:a0", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6b9cb89-6a", "ovs_interfaceid": "a6b9cb89-6a29-44b5-91b1-0591266c582b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.507856] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fa82d2-a393-0275-58c1-5c2df52287c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009316} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.508209] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.508469] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1756.508683] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.508944] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.509398] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1756.509614] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bf036da-65c9-4b19-a79f-200fba1876c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.514350] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1756.514350] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a2d567-a4ab-3f42-38da-7070cc2c10ec" [ 1756.514350] env[63241]: _type = "Task" [ 1756.514350] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.523174] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a2d567-a4ab-3f42-38da-7070cc2c10ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.560363] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820860, 'name': PowerOffVM_Task, 'duration_secs': 0.179449} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.560652] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1756.563956] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance '864175e0-33f0-429f-bdf6-722d9b00da2b' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1756.688759] env[63241]: DEBUG nova.network.neutron [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Updated VIF entry in instance network info cache for port a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1756.689199] env[63241]: DEBUG nova.network.neutron [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Updating instance_info_cache with network_info: [{"id": "a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec", "address": "fa:16:3e:fe:33:68", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4ff42d0-66", "ovs_interfaceid": "a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.773573] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820861, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.790048] env[63241]: DEBUG oslo_concurrency.lockutils [None req-91ac9452-0fe0-433b-8267-de66e9d973bc tempest-AttachInterfacesUnderV243Test-51790697 tempest-AttachInterfacesUnderV243Test-51790697-project-member] Lock "e3842404-2c80-4fa9-b0c9-c58c484845a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 19.871s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.901451] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820862, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.924662] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f3d489-5345-2c68-d61c-6cf9885ae967, 'name': SearchDatastore_Task, 'duration_secs': 0.03004} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.925842] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.925842] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e28ba013-0bc5-4edc-858d-674980bc8742/e28ba013-0bc5-4edc-858d-674980bc8742.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1756.925842] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.925842] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1756.926215] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdf02c71-a279-4e7e-990d-d600d6fb4de5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.928144] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-cbdb8939-a6b6-4fd6-833c-b3941a59f7fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.936479] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1756.936479] env[63241]: value = "task-1820863" [ 1756.936479] env[63241]: _type = "Task" [ 1756.936479] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.941354] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1756.941581] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1756.946254] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24a4c199-448e-4aeb-87a2-7921cf1be489 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.954375] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820863, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.959038] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1756.959038] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52941368-448b-dd59-4643-f7b6681e1e17" [ 1756.959038] env[63241]: _type = "Task" [ 1756.959038] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.960387] env[63241]: DEBUG oslo_concurrency.lockutils [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] Releasing lock "refresh_cache-b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.960387] env[63241]: DEBUG nova.compute.manager [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Received event network-changed-6be29b8b-a0d6-4346-b774-5faf878f177c {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1756.960387] env[63241]: DEBUG nova.compute.manager [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Refreshing instance network info cache due to event network-changed-6be29b8b-a0d6-4346-b774-5faf878f177c. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1756.960690] env[63241]: DEBUG oslo_concurrency.lockutils [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] Acquiring lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.960785] env[63241]: DEBUG oslo_concurrency.lockutils [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] Acquired lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.960947] env[63241]: DEBUG nova.network.neutron [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Refreshing network info cache for port 6be29b8b-a0d6-4346-b774-5faf878f177c {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1756.974591] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52941368-448b-dd59-4643-f7b6681e1e17, 'name': SearchDatastore_Task, 'duration_secs': 0.011741} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.979413] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-270d72ca-107f-4a1f-adc5-845a7a7e91f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.985977] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1756.985977] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5262c6e4-475c-ec79-8349-30717475b767" [ 1756.985977] env[63241]: _type = "Task" [ 1756.985977] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.998533] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5262c6e4-475c-ec79-8349-30717475b767, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.029823] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a2d567-a4ab-3f42-38da-7070cc2c10ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010368} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.029823] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.029823] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1757.030098] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.067923] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1757.068211] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1757.068435] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1757.068678] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1757.068877] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1757.069069] env[63241]: DEBUG 
nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1757.069327] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1757.069502] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1757.069736] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1757.069935] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1757.070129] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1757.076620] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67400c64-4b33-429e-896f-dfecb296385e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.088984] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023480b9-f3f3-4aa0-9017-297d380644b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.100655] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3d3a27-0218-4829-8246-1e944316dde8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.105084] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1757.105084] env[63241]: value = "task-1820864" [ 1757.105084] env[63241]: _type = "Task" [ 1757.105084] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.138690] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e62cb5-267d-4afd-831c-2157f5855ef4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.144625] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820864, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.149648] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa443c8-37ee-40b2-bbb2-2e8693c44b8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.163537] env[63241]: DEBUG nova.compute.provider_tree [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1757.191877] env[63241]: DEBUG oslo_concurrency.lockutils [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] Releasing lock "refresh_cache-e62f49f0-370d-4b5d-ab43-72e0e6238432" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.192164] env[63241]: DEBUG nova.compute.manager [req-b0f69a33-84ba-4bba-8c1b-7a726ef67ebc req-acac78d2-77d8-460b-8890-c9a2954591b0 service nova] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Received event network-vif-deleted-0457ca89-42e2-485c-a958-773620259283 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1757.221386] env[63241]: DEBUG nova.network.neutron [-] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.277225] env[63241]: DEBUG oslo_vmware.api [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820861, 'name': PowerOnVM_Task, 'duration_secs': 0.752069} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.277856] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1757.278408] env[63241]: INFO nova.compute.manager [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Took 14.85 seconds to spawn the instance on the hypervisor. 
[ 1757.278915] env[63241]: DEBUG nova.compute.manager [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1757.280659] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d88a305-db7b-441f-a641-88ecdb1cbfd5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.337069] env[63241]: DEBUG nova.compute.manager [req-d77d3cbb-c249-4e3b-846d-8865b94eb6cf req-54f83058-ef7b-4579-8c94-330a9350005b service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-vif-deleted-10657b5b-6750-4389-b802-7e6bee8963e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1757.393209] env[63241]: DEBUG oslo_vmware.api [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820862, 'name': RemoveSnapshot_Task, 'duration_secs': 0.781801} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.393744] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1757.453015] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820863, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.498845] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5262c6e4-475c-ec79-8349-30717475b767, 'name': SearchDatastore_Task, 'duration_secs': 0.011592} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.499158] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.499425] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e/b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1757.499769] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.499959] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1757.500246] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb0a07bc-88b8-4b20-8bc9-970eb6ac2fa0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.505896] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbd63e3f-4c03-40b7-8be9-ffd3404a3980 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.513419] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1757.513419] env[63241]: value = "task-1820865" [ 1757.513419] env[63241]: _type = "Task" [ 1757.513419] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.522997] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1757.524248] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1757.525427] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3791ce0-6d5c-4098-9b07-50c997327021 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.532945] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820865, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.539408] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1757.539408] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e88843-fdbb-ae17-8b35-423c38b9a731" [ 1757.539408] env[63241]: _type = "Task" [ 1757.539408] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.548343] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e88843-fdbb-ae17-8b35-423c38b9a731, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.615646] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820864, 'name': ReconfigVM_Task, 'duration_secs': 0.276175} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.619272] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance '864175e0-33f0-429f-bdf6-722d9b00da2b' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1757.684903] env[63241]: DEBUG nova.scheduler.client.report [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1757.723885] env[63241]: INFO nova.compute.manager [-] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Took 1.54 seconds to deallocate network for instance. 
[ 1757.807220] env[63241]: INFO nova.compute.manager [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Took 25.38 seconds to build instance. [ 1757.902454] env[63241]: WARNING nova.compute.manager [None req-ccd996a2-fbda-4b0b-b18b-06f06b29040b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Image not found during snapshot: nova.exception.ImageNotFound: Image d1859f1b-bae7-4679-bf8c-0138bfa1f249 could not be found. [ 1757.957132] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820863, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60193} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.957798] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e28ba013-0bc5-4edc-858d-674980bc8742/e28ba013-0bc5-4edc-858d-674980bc8742.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1757.958246] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1757.960088] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3697254-347c-4f88-b13e-d640469c3af3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.969877] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1757.969877] env[63241]: value = "task-1820866" [ 1757.969877] env[63241]: _type = "Task" [ 1757.969877] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.970808] env[63241]: DEBUG nova.network.neutron [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updated VIF entry in instance network info cache for port 6be29b8b-a0d6-4346-b774-5faf878f177c. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1757.971250] env[63241]: DEBUG nova.network.neutron [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.987779] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820866, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.026304] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496896} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.026682] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e/b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1758.026982] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1758.027345] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ff3a415-1682-48db-8831-ff6669c1bdc2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.035560] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1758.035560] env[63241]: value = "task-1820867" [ 1758.035560] env[63241]: _type = "Task" [ 1758.035560] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.051349] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820867, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.060102] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e88843-fdbb-ae17-8b35-423c38b9a731, 'name': SearchDatastore_Task, 'duration_secs': 0.029515} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.060854] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-594d17f2-e04d-458d-9560-3f8e3ab12708 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.068661] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1758.068661] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a1d3fa-3533-c5d8-396e-ff9e829e8740" [ 1758.068661] env[63241]: _type = "Task" [ 1758.068661] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.079663] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a1d3fa-3533-c5d8-396e-ff9e829e8740, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.129018] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1758.129018] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1758.129018] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1758.129018] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1758.129018] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1758.129018] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1758.133015] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1758.133015] env[63241]: DEBUG 
nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1758.133015] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1758.133015] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1758.133015] env[63241]: DEBUG nova.virt.hardware [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1758.136816] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Reconfiguring VM instance instance-00000057 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1758.137759] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-486db1d4-487e-482c-a36b-96a988b5d549 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.156867] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1758.156867] env[63241]: value = "task-1820868" [ 1758.156867] env[63241]: _type = "Task" [ 1758.156867] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.166888] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820868, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.197028] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.945s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.199248] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.252s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.200815] env[63241]: INFO nova.compute.claims [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1758.220102] env[63241]: INFO nova.scheduler.client.report [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Deleted allocations for instance f372d405-f7d5-4e5f-8c36-fe9651af2a0d [ 1758.230721] env[63241]: DEBUG oslo_concurrency.lockutils [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.310158] env[63241]: DEBUG oslo_concurrency.lockutils [None req-89fbd16f-cec1-450f-9829-2b5668a6b728 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.898s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.478574] env[63241]: DEBUG oslo_concurrency.lockutils [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] Releasing lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.479017] env[63241]: DEBUG nova.compute.manager [req-03937188-fd89-4b3d-83d4-9d50a8d01b1a req-d4bfde0c-78f9-43fd-ba79-b4a08a3b8534 service nova] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Received event network-vif-deleted-15a93d52-2bf4-4998-9189-94c3ba81a5ae {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1758.484157] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081357} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.484537] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1758.485665] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edd03f0-17d8-412c-a231-93c3be6a83a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.511968] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] e28ba013-0bc5-4edc-858d-674980bc8742/e28ba013-0bc5-4edc-858d-674980bc8742.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1758.512222] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7038148a-1bc7-4cd4-87a7-27d4929d01a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.532336] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1758.532336] env[63241]: value = "task-1820869" [ 1758.532336] env[63241]: _type = "Task" [ 1758.532336] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.541896] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820869, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.546474] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820867, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.340355} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.546779] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1758.548271] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f776678-36a4-4e75-8664-cbaa0130083d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.579165] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e/b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1758.582707] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5a232b6-3fa8-4881-9148-99e1cd909ec1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.597485] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa578fb-c281-4800-93c3-b81433d9f1ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.607565] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a1d3fa-3533-c5d8-396e-ff9e829e8740, 'name': SearchDatastore_Task, 'duration_secs': 0.017925} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.609899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.610177] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e62f49f0-370d-4b5d-ab43-72e0e6238432/e62f49f0-370d-4b5d-ab43-72e0e6238432.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1758.610443] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba23d19-319a-48b1-8742-ab75742c4bcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Suspending the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1758.610696] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1758.610696] env[63241]: value = "task-1820870" [ 1758.610696] env[63241]: _type = "Task" [ 1758.610696] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.610881] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8103aee7-9cdc-4c28-9af0-654d8aab8b88 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.612643] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9e8f3b72-00af-4a9f-8cfc-905cc6de36b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.623119] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820870, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.625678] env[63241]: DEBUG oslo_vmware.api [None req-0ba23d19-319a-48b1-8742-ab75742c4bcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1758.625678] env[63241]: value = "task-1820871" [ 1758.625678] env[63241]: _type = "Task" [ 1758.625678] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.625985] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1758.625985] env[63241]: value = "task-1820872" [ 1758.625985] env[63241]: _type = "Task" [ 1758.625985] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.638042] env[63241]: DEBUG oslo_vmware.api [None req-0ba23d19-319a-48b1-8742-ab75742c4bcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820871, 'name': SuspendVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.640948] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820872, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.666803] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820868, 'name': ReconfigVM_Task, 'duration_secs': 0.316722} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.667788] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Reconfigured VM instance instance-00000057 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1758.668185] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec4efc9-cde7-4ae9-b165-8b9b653ea770 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.692542] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 864175e0-33f0-429f-bdf6-722d9b00da2b/864175e0-33f0-429f-bdf6-722d9b00da2b.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1758.692886] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a20f7984-96db-408e-9fde-6ba0d88913f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.713487] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1758.713487] env[63241]: value = "task-1820873" [ 1758.713487] env[63241]: _type = "Task" [ 1758.713487] env[63241]: } to 
complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.722720] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820873, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.729173] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbb8551a-f3f0-432f-bf3a-3a4048a05b33 tempest-ListServerFiltersTestJSON-2123634103 tempest-ListServerFiltersTestJSON-2123634103-project-member] Lock "f372d405-f7d5-4e5f-8c36-fe9651af2a0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.648s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.755784] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "b7378019-a572-4d4d-a82d-cee13a1b6a88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.756074] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "b7378019-a572-4d4d-a82d-cee13a1b6a88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.756286] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "b7378019-a572-4d4d-a82d-cee13a1b6a88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.756460] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "b7378019-a572-4d4d-a82d-cee13a1b6a88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.756641] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "b7378019-a572-4d4d-a82d-cee13a1b6a88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.758758] env[63241]: INFO nova.compute.manager [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] 
[instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Terminating instance [ 1758.760589] env[63241]: DEBUG nova.compute.manager [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1758.760786] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1758.764018] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121d274a-7d2a-430d-9376-91b302f95c69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.770484] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1758.770766] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40adaf0a-3c3f-4042-a611-cd5cacdf7f67 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.777811] env[63241]: DEBUG oslo_vmware.api [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1758.777811] env[63241]: value = "task-1820874" [ 1758.777811] env[63241]: _type = "Task" [ 1758.777811] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.786869] env[63241]: DEBUG oslo_vmware.api [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820874, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.046504] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820869, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.125325] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820870, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.140088] env[63241]: DEBUG oslo_vmware.api [None req-0ba23d19-319a-48b1-8742-ab75742c4bcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820871, 'name': SuspendVM_Task} progress is 83%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.144708] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820872, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.227624] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820873, 'name': ReconfigVM_Task, 'duration_secs': 0.469886} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.231157] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 864175e0-33f0-429f-bdf6-722d9b00da2b/864175e0-33f0-429f-bdf6-722d9b00da2b.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1759.231157] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance '864175e0-33f0-429f-bdf6-722d9b00da2b' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1759.294897] env[63241]: DEBUG oslo_vmware.api [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820874, 'name': PowerOffVM_Task, 'duration_secs': 0.509819} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.294897] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1759.294897] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1759.294897] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f26f44a7-a324-4618-b521-532640d3f91c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.531319] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e338347-72ed-4e7c-b025-f1d12595184f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.545087] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10224020-11db-4cd1-bac3-5fcda414ec12 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.548303] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820869, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.579889] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa551167-c5a5-4a61-9299-ecf5e0628522 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.590995] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3223c8e9-5e9e-41ec-83b4-ae5a5d3bc2ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.605368] env[63241]: DEBUG nova.compute.provider_tree [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1759.626772] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820870, 'name': ReconfigVM_Task, 'duration_secs': 0.647121} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.628029] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Reconfigured VM instance instance-0000005a to attach disk [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e/b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1759.634648] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7a3b357-9b16-4b61-a8af-f417e66b1cc9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.642419] env[63241]: DEBUG oslo_vmware.api [None req-0ba23d19-319a-48b1-8742-ab75742c4bcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820871, 'name': SuspendVM_Task} progress is 83%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.647340] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820872, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598753} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.647340] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1759.647340] env[63241]: value = "task-1820876" [ 1759.647340] env[63241]: _type = "Task" [ 1759.647340] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.647340] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e62f49f0-370d-4b5d-ab43-72e0e6238432/e62f49f0-370d-4b5d-ab43-72e0e6238432.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1759.647606] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1759.648373] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-508b033f-19d3-468a-b687-9ac374cc8377 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.658751] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820876, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.661146] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1759.661146] env[63241]: value = "task-1820877" [ 1759.661146] env[63241]: _type = "Task" [ 1759.661146] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.671475] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820877, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.736954] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367943ac-83b9-442d-a35c-d12854b7faea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.761498] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a36099a-0dd3-4cf0-a434-2ed71cbcfdf7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.782820] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance '864175e0-33f0-429f-bdf6-722d9b00da2b' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1760.046857] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820869, 'name': ReconfigVM_Task, 'duration_secs': 1.275505} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.047257] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Reconfigured VM instance instance-0000005c to attach disk [datastore1] e28ba013-0bc5-4edc-858d-674980bc8742/e28ba013-0bc5-4edc-858d-674980bc8742.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1760.048244] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b0ecc5e-e90e-4540-91e6-505db97b8162 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.054979] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1760.054979] env[63241]: value = "task-1820878" [ 1760.054979] env[63241]: _type = "Task" [ 1760.054979] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.086023] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820878, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.108521] env[63241]: DEBUG nova.scheduler.client.report [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1760.137281] env[63241]: DEBUG oslo_vmware.api [None req-0ba23d19-319a-48b1-8742-ab75742c4bcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820871, 'name': SuspendVM_Task, 'duration_secs': 1.427921} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.137549] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba23d19-319a-48b1-8742-ab75742c4bcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Suspended the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1760.137750] env[63241]: DEBUG nova.compute.manager [None req-0ba23d19-319a-48b1-8742-ab75742c4bcd tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1760.138679] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930ffec0-df0c-4660-a068-54da85599722 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.156844] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820876, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.171585] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820877, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.337801] env[63241]: DEBUG nova.network.neutron [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Port be5471bd-3bc7-4ef4-9ea6-be69b0420644 binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1760.567402] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820878, 'name': Rename_Task, 'duration_secs': 0.321508} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.567962] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1760.568671] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8df6a470-eceb-4cce-8241-1d14ac1c8199 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.580741] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1760.580741] env[63241]: value = "task-1820879" [ 1760.580741] env[63241]: _type = "Task" [ 1760.580741] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.581821] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1760.582083] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1760.582317] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Deleting the datastore file [datastore1] b7378019-a572-4d4d-a82d-cee13a1b6a88 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1760.585557] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e63c57ab-0032-493e-8dd6-ab672a5f7681 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.596914] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820879, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.598522] env[63241]: DEBUG oslo_vmware.api [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1760.598522] env[63241]: value = "task-1820880" [ 1760.598522] env[63241]: _type = "Task" [ 1760.598522] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.606439] env[63241]: DEBUG oslo_vmware.api [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820880, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.613775] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.614423] env[63241]: DEBUG nova.compute.manager [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1760.618486] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.850s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.620067] env[63241]: INFO nova.compute.claims [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1760.660762] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820876, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.671944] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820877, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.092527] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820879, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.107552] env[63241]: DEBUG oslo_vmware.api [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820880, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.127518] env[63241]: DEBUG nova.compute.utils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1761.129030] env[63241]: DEBUG nova.compute.manager [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1761.129216] env[63241]: DEBUG nova.network.neutron [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1761.160578] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820876, 'name': Rename_Task, 'duration_secs': 1.496187} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.160996] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1761.161147] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a99d1ded-f0b2-47a3-8fb2-f5d6ad097d77 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.167650] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1761.167650] env[63241]: value = "task-1820881" [ 1761.167650] env[63241]: _type = "Task" [ 1761.167650] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.169045] env[63241]: DEBUG nova.policy [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2f5065726be41378be3b5bc0198f9fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08e0c8d883004d0fb18507be072eb781', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1761.173548] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.400574} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.176816] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1761.177893] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ae2915-c124-4a5e-876b-4cc092f3d0a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.181383] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.181607] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.181777] env[63241]: INFO nova.compute.manager [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Shelving [ 1761.186923] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820881, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.198390] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.198504] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.198666] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.198851] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.199041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.210091] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] e62f49f0-370d-4b5d-ab43-72e0e6238432/e62f49f0-370d-4b5d-ab43-72e0e6238432.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1761.210760] env[63241]: INFO nova.compute.manager [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Terminating instance [ 1761.215419] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74d62801-5e62-495e-a886-0a484d1d5d78 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.231530] env[63241]: DEBUG nova.compute.manager [None 
req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1761.231530] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1761.232331] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42680c51-55b8-40d6-96fc-251790783fcb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.240740] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1761.241989] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1726f45-b78a-4b2c-b401-df9dba40d404 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.243599] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1761.243599] env[63241]: value = "task-1820882" [ 1761.243599] env[63241]: _type = "Task" [ 1761.243599] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.253466] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820882, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.345280] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1761.345549] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1761.345755] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleting the datastore file [datastore1] 6b4debb5-5a83-45f7-bcf2-36a10f95f644 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1761.346169] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1003079b-0f92-4df5-9159-ba2773f3e990 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.366296] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "864175e0-33f0-429f-bdf6-722d9b00da2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.366594] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.366811] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.370329] env[63241]: DEBUG oslo_vmware.api [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1761.370329] env[63241]: value = "task-1820884" [ 1761.370329] env[63241]: _type = "Task" [ 1761.370329] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.380515] env[63241]: DEBUG oslo_vmware.api [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820884, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.559551] env[63241]: DEBUG nova.network.neutron [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Successfully created port: f206ebca-5602-446b-aa53-e4a3d5686739 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1761.595566] env[63241]: DEBUG oslo_vmware.api [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1820879, 'name': PowerOnVM_Task, 'duration_secs': 0.8092} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.595862] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1761.596087] env[63241]: INFO nova.compute.manager [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Took 9.80 seconds to spawn the instance on the hypervisor. [ 1761.596690] env[63241]: DEBUG nova.compute.manager [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1761.597154] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930131b2-e75f-4b0f-80c2-54da8178790f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.613832] env[63241]: DEBUG oslo_vmware.api [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.583326} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.613832] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1761.613832] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1761.614052] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1761.614154] env[63241]: INFO nova.compute.manager [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Took 2.85 seconds to destroy the instance on the hypervisor. [ 1761.614485] env[63241]: DEBUG oslo.service.loopingcall [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1761.614744] env[63241]: DEBUG nova.compute.manager [-] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1761.614744] env[63241]: DEBUG nova.network.neutron [-] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1761.633457] env[63241]: DEBUG nova.compute.manager [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1761.686113] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820881, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.691823] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1761.692110] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d403d17f-0a88-4423-b19d-bab7525b26dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.701608] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1761.701608] env[63241]: value = "task-1820885" [ 1761.701608] env[63241]: _type = "Task" [ 1761.701608] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.712295] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820885, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.757121] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820882, 'name': ReconfigVM_Task, 'duration_secs': 0.294065} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.757465] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Reconfigured VM instance instance-0000005b to attach disk [datastore1] e62f49f0-370d-4b5d-ab43-72e0e6238432/e62f49f0-370d-4b5d-ab43-72e0e6238432.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1761.758150] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-135b2521-7954-4334-b5e3-1f10e30dc553 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.764932] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1761.764932] env[63241]: value = "task-1820886" [ 1761.764932] env[63241]: _type = "Task" [ 1761.764932] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.776130] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820886, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.889974] env[63241]: DEBUG oslo_vmware.api [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187861} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.890330] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1761.890521] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1761.890723] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1761.890873] env[63241]: INFO nova.compute.manager [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Took 0.66 seconds to destroy the instance on the hypervisor. [ 1761.891137] env[63241]: DEBUG oslo.service.loopingcall [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1761.891888] env[63241]: DEBUG nova.compute.manager [-] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1761.891888] env[63241]: DEBUG nova.network.neutron [-] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1761.928157] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4397c5f-72c4-4152-b11c-55056e501f42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.936361] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77df70dc-5a45-4267-b242-6913e3ac9f91 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.967638] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c38f79-19d8-431e-9b22-f79628f8d10d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.975500] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d63074f-3be3-41bf-a062-954570fa56ce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.990705] env[63241]: DEBUG nova.compute.provider_tree [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.125177] env[63241]: INFO nova.compute.manager [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Took 26.43 seconds to build instance. 
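The Rename_Task, ExtendVirtualDisk_Task, PowerOnVM_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware task lifecycle: invoke an asynchronous vCenter task, log "Waiting for the task", poll its progress, and report completion. The following is a minimal illustrative sketch of that pattern written against oslo.vmware's public API rather than Nova's internal wrappers (nova.virt.vmwareapi.vm_util, which the log lines actually come from); the host, credentials, retry/poll values and the managed-object reference value are placeholders, not taken from this log.

# Illustrative sketch only: hostname, credentials, retry/poll values and
# the moref value are placeholders; only the oslo.vmware calls are real API.
from oslo_vmware import api, vim_util

def power_on_vm(host, user, password, vm_moref_value):
    # One vCenter API session, normally created once and shared per process.
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    # Build a ManagedObjectReference for an existing VM, e.g. 'vm-1234'.
    vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')
    # "Invoking VirtualMachine.PowerOnVM_Task ..." -- starts the async task.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # "Waiting for the task ..." / "progress is N%" / "completed successfully"
    # -- wait_for_task polls TaskInfo until success or raises on failure.
    return session.wait_for_task(task)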
[ 1762.145706] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.145928] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.148475] env[63241]: INFO nova.compute.manager [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Shelving [ 1762.189215] env[63241]: DEBUG oslo_vmware.api [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820881, 'name': PowerOnVM_Task, 'duration_secs': 0.521133} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.190566] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1762.190865] env[63241]: INFO nova.compute.manager [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Took 17.29 seconds to spawn the instance on the hypervisor. [ 1762.191151] env[63241]: DEBUG nova.compute.manager [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1762.192656] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d970a27e-4592-4633-89e4-3b68d760b6a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.215320] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820885, 'name': PowerOffVM_Task, 'duration_secs': 0.188458} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.215593] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1762.216867] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c8f889-7901-42c7-bae3-c8d6bab326e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.238020] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e0056d-cf59-4e5c-811a-76d6bb1f9573 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.277641] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820886, 'name': Rename_Task, 'duration_secs': 0.186092} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.280019] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1762.280019] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52bba677-7608-4532-8b89-dba7683aac53 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.287257] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1762.287257] env[63241]: value = "task-1820887" [ 1762.287257] env[63241]: _type = "Task" [ 1762.287257] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.298273] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820887, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.307233] env[63241]: DEBUG nova.compute.manager [req-9600d8d7-28da-402a-980e-64d9e32fccd8 req-f098669c-0e27-42cb-b527-622079fa806e service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Received event network-vif-deleted-cf92201f-3cfa-4edb-b9dd-b305a4c37115 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1762.307450] env[63241]: INFO nova.compute.manager [req-9600d8d7-28da-402a-980e-64d9e32fccd8 req-f098669c-0e27-42cb-b527-622079fa806e service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Neutron deleted interface cf92201f-3cfa-4edb-b9dd-b305a4c37115; detaching it from the instance and deleting it from the info cache [ 1762.307628] env[63241]: DEBUG nova.network.neutron [req-9600d8d7-28da-402a-980e-64d9e32fccd8 req-f098669c-0e27-42cb-b527-622079fa806e service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.416690] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1762.416911] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1762.417128] env[63241]: DEBUG nova.network.neutron [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1762.493679] env[63241]: DEBUG nova.scheduler.client.report [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1762.579358] env[63241]: DEBUG nova.network.neutron [-] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.626197] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9d9dd9e-626e-4d0f-9d99-171c6efd8f35 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.939s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.644097] env[63241]: DEBUG nova.compute.manager [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1762.655526] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1762.655809] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1485fd70-9ab6-4576-8168-42edbdddc0a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.664043] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1762.664043] env[63241]: value = "task-1820888" [ 1762.664043] env[63241]: _type = "Task" [ 1762.664043] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.671182] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1762.671500] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1762.671712] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1762.671947] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a 
tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1762.672124] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1762.672331] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1762.672560] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1762.672780] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1762.672985] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1762.673236] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1762.673424] env[63241]: DEBUG nova.virt.hardware [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1762.674485] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2aaf23-a76b-44a3-9cc5-e5ca4aa0c621 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.680469] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820888, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.686095] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b019e88-4b40-4d99-b6cb-c04790840bda {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.717395] env[63241]: INFO nova.compute.manager [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Took 27.86 seconds to build instance. [ 1762.748982] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1762.749294] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-563fadd9-0dc7-410b-ae44-7aa786b0aa9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.758836] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1762.758836] env[63241]: value = "task-1820889" [ 1762.758836] env[63241]: _type = "Task" [ 1762.758836] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.768203] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820889, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.797509] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820887, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.810790] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b69da861-5293-48ce-bee3-2ab29fe6fae0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.821594] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0943c83a-91f5-4d84-b5fa-da2050ca735f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.852863] env[63241]: DEBUG nova.compute.manager [req-9600d8d7-28da-402a-980e-64d9e32fccd8 req-f098669c-0e27-42cb-b527-622079fa806e service nova] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Detach interface failed, port_id=cf92201f-3cfa-4edb-b9dd-b305a4c37115, reason: Instance b7378019-a572-4d4d-a82d-cee13a1b6a88 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1762.873145] env[63241]: DEBUG nova.network.neutron [-] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.998767] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.999309] env[63241]: DEBUG nova.compute.manager [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1763.002291] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.792s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.002515] env[63241]: DEBUG nova.objects.instance [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lazy-loading 'resources' on Instance uuid 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.082885] env[63241]: INFO nova.compute.manager [-] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Took 1.47 seconds to deallocate network for instance. [ 1763.175713] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820888, 'name': PowerOffVM_Task, 'duration_secs': 0.272538} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.176043] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1763.177167] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4a85a9-4763-4bc5-b5a9-c882892e8aba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.203014] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26468615-8da2-4632-b653-8db3345b5ebd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.219731] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73c9974f-8201-4569-811c-0489919bab7d tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.377s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.249309] env[63241]: DEBUG nova.network.neutron [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance_info_cache with network_info: [{"id": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "address": "fa:16:3e:98:e9:3c", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5471bd-3b", "ovs_interfaceid": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.257191] env[63241]: DEBUG nova.compute.manager [req-b4228d4a-4bc1-4d86-abf8-7f028db644f6 req-438189e3-5824-43db-a6e0-bb3a998bcdaa service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Received event network-vif-plugged-f206ebca-5602-446b-aa53-e4a3d5686739 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1763.257627] env[63241]: DEBUG oslo_concurrency.lockutils [req-b4228d4a-4bc1-4d86-abf8-7f028db644f6 
req-438189e3-5824-43db-a6e0-bb3a998bcdaa service nova] Acquiring lock "037f539f-1bf1-4897-81b3-08c377b92211-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.257932] env[63241]: DEBUG oslo_concurrency.lockutils [req-b4228d4a-4bc1-4d86-abf8-7f028db644f6 req-438189e3-5824-43db-a6e0-bb3a998bcdaa service nova] Lock "037f539f-1bf1-4897-81b3-08c377b92211-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.258134] env[63241]: DEBUG oslo_concurrency.lockutils [req-b4228d4a-4bc1-4d86-abf8-7f028db644f6 req-438189e3-5824-43db-a6e0-bb3a998bcdaa service nova] Lock "037f539f-1bf1-4897-81b3-08c377b92211-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.258353] env[63241]: DEBUG nova.compute.manager [req-b4228d4a-4bc1-4d86-abf8-7f028db644f6 req-438189e3-5824-43db-a6e0-bb3a998bcdaa service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] No waiting events found dispatching network-vif-plugged-f206ebca-5602-446b-aa53-e4a3d5686739 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1763.258853] env[63241]: WARNING nova.compute.manager [req-b4228d4a-4bc1-4d86-abf8-7f028db644f6 req-438189e3-5824-43db-a6e0-bb3a998bcdaa service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Received unexpected event network-vif-plugged-f206ebca-5602-446b-aa53-e4a3d5686739 for instance with vm_state building and task_state spawning. [ 1763.270422] env[63241]: DEBUG nova.network.neutron [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Successfully updated port: f206ebca-5602-446b-aa53-e4a3d5686739 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1763.276219] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820889, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.298162] env[63241]: DEBUG oslo_vmware.api [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1820887, 'name': PowerOnVM_Task, 'duration_secs': 0.71695} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.298385] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1763.298587] env[63241]: INFO nova.compute.manager [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Took 13.93 seconds to spawn the instance on the hypervisor. [ 1763.298770] env[63241]: DEBUG nova.compute.manager [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1763.299584] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ed618b-b944-4090-9f18-3e9d24a37260 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.376307] env[63241]: INFO nova.compute.manager [-] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Took 1.48 seconds to deallocate network for instance. [ 1763.505802] env[63241]: DEBUG nova.compute.utils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1763.507353] env[63241]: DEBUG nova.compute.manager [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1763.507546] env[63241]: DEBUG nova.network.neutron [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1763.564214] env[63241]: DEBUG nova.policy [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78657a2bc34d4bb9922678ed287530f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18094134f49b4e84b83e97631bc22903', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1763.590425] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.714792] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1763.714792] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a5a80bb9-04d0-4875-9fd1-c41c1f4bcc76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.723965] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1763.723965] env[63241]: value = "task-1820890" [ 1763.723965] env[63241]: _type = "Task" [ 1763.723965] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.733060] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820890, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.751967] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.773476] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820889, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.780123] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "refresh_cache-037f539f-1bf1-4897-81b3-08c377b92211" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.781559] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquired lock "refresh_cache-037f539f-1bf1-4897-81b3-08c377b92211" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.781559] env[63241]: DEBUG nova.network.neutron [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1763.800556] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d030a2d-e98c-4c2d-b001-f09f3b5b981e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.809261] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c0d39f-2970-4c5a-83c8-747cfec64470 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.827834] env[63241]: INFO nova.compute.manager [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Took 28.58 seconds to build instance. 
[ 1763.856528] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41c34435-8a65-43e5-919e-9fa4e1f3593d tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 29.620s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.857540] env[63241]: DEBUG nova.network.neutron [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Successfully created port: 7710fb49-7df6-4e60-ace2-b51c25d1d8ea {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1763.860130] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e1f7ea-be71-4dba-b9ee-5065bb1d4739 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.868376] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d652b2-3cb9-4f9b-9d22-bc9b278374b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.883564] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.884091] env[63241]: DEBUG nova.compute.provider_tree [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1764.018857] env[63241]: DEBUG nova.compute.manager [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1764.186170] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.186548] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.186813] env[63241]: DEBUG nova.compute.manager [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1764.188039] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aaae4a1-c572-4e4d-83d5-bb28ef52b4db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.196053] env[63241]: DEBUG nova.compute.manager [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1764.196626] env[63241]: DEBUG nova.objects.instance [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'flavor' on Instance uuid b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1764.234674] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820890, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.272908] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820889, 'name': CreateSnapshot_Task, 'duration_secs': 1.169195} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.274178] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1764.275062] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4cfa32-6399-4def-8558-3591062b203a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.278337] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff180d2-299d-48d9-89ff-bcf989b9fb02 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.307015] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0486b48-f3e0-4962-838e-54cec6af45a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.315185] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance '864175e0-33f0-429f-bdf6-722d9b00da2b' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1764.376303] env[63241]: DEBUG nova.network.neutron [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1764.387678] env[63241]: DEBUG nova.scheduler.client.report [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1764.465533] env[63241]: DEBUG nova.compute.manager [req-293f95ac-4a6c-4ef6-8cb0-541126eeaecc req-984b9f6f-1d2b-426b-8550-dfe7ccd5f7e0 service nova] [instance: 6b4debb5-5a83-45f7-bcf2-36a10f95f644] Received event network-vif-deleted-ead55549-b686-405a-a2d0-8a995905158b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1764.622056] env[63241]: DEBUG nova.network.neutron [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Updating instance_info_cache with network_info: [{"id": "f206ebca-5602-446b-aa53-e4a3d5686739", "address": "fa:16:3e:84:c3:dd", "network": {"id": "da181c86-2cd2-4b0b-bf7c-0c2bdbb63796", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-51851294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08e0c8d883004d0fb18507be072eb781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf206ebca-56", "ovs_interfaceid": "f206ebca-5602-446b-aa53-e4a3d5686739", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.702355] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1764.702645] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-237a3b82-4a35-4c14-852a-fa5911edbd70 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.710269] env[63241]: DEBUG oslo_vmware.api [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 
tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1764.710269] env[63241]: value = "task-1820891" [ 1764.710269] env[63241]: _type = "Task" [ 1764.710269] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.719159] env[63241]: DEBUG oslo_vmware.api [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820891, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.735034] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820890, 'name': CreateSnapshot_Task, 'duration_secs': 0.688161} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.735034] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1764.735218] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a038782-7b63-47e4-8ec2-63a70476ac00 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.820158] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1764.822833] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1764.823292] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8ab6624c-1924-4f0d-9503-69266d4c980c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.827229] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-880aff17-360d-47ea-8f38-7eab3a253403 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.834192] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1764.834192] env[63241]: value = "task-1820893" [ 1764.834192] env[63241]: _type = "Task" [ 1764.834192] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.835609] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1764.835609] env[63241]: value = "task-1820892" [ 1764.835609] env[63241]: _type = "Task" [ 1764.835609] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.855540] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820893, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.859014] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820892, 'name': CloneVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.893096] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.896078] env[63241]: DEBUG oslo_concurrency.lockutils [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.666s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.896346] env[63241]: DEBUG nova.objects.instance [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'resources' on Instance uuid 9d301157-6870-4452-9ae6-0d45c4338886 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1764.914897] env[63241]: INFO nova.scheduler.client.report [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Deleted allocations for instance 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce [ 1765.034358] env[63241]: DEBUG nova.compute.manager [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1765.062817] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1765.063103] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1765.063305] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1765.063473] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1765.063654] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1765.063805] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1765.064028] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1765.064186] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1765.064359] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1765.064517] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1765.064684] env[63241]: DEBUG nova.virt.hardware [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1765.065869] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9577ea8b-07bc-41ab-8f54-094b3d59b5ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.074892] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5622f447-a7b2-48c5-9d50-e82cd49e560b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.124710] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Releasing lock "refresh_cache-037f539f-1bf1-4897-81b3-08c377b92211" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.125091] env[63241]: DEBUG nova.compute.manager [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Instance network_info: |[{"id": "f206ebca-5602-446b-aa53-e4a3d5686739", "address": "fa:16:3e:84:c3:dd", "network": {"id": "da181c86-2cd2-4b0b-bf7c-0c2bdbb63796", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-51851294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08e0c8d883004d0fb18507be072eb781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf206ebca-56", "ovs_interfaceid": "f206ebca-5602-446b-aa53-e4a3d5686739", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1765.125528] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 
037f539f-1bf1-4897-81b3-08c377b92211] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:c3:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f206ebca-5602-446b-aa53-e4a3d5686739', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1765.133381] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Creating folder: Project (08e0c8d883004d0fb18507be072eb781). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1765.133670] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c771f00f-278b-4cea-91de-9af0e3bd5e06 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.144335] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Created folder: Project (08e0c8d883004d0fb18507be072eb781) in parent group-v376927. [ 1765.144515] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Creating folder: Instances. Parent ref: group-v377173. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1765.144752] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4033313d-25a4-41d9-bea5-25e75c630df7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.155216] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Created folder: Instances in parent group-v377173. [ 1765.155281] env[63241]: DEBUG oslo.service.loopingcall [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1765.155461] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1765.155679] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44df2504-690d-4052-875f-79b1c2d68548 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.175471] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1765.175471] env[63241]: value = "task-1820896" [ 1765.175471] env[63241]: _type = "Task" [ 1765.175471] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.186706] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820896, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.221090] env[63241]: DEBUG oslo_vmware.api [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820891, 'name': PowerOffVM_Task, 'duration_secs': 0.289367} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.221622] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1765.221893] env[63241]: DEBUG nova.compute.manager [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1765.222997] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32333e0e-d91e-44dd-9816-846033e65174 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.257912] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1765.258542] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d8b6407c-c6fa-489f-b0a4-da3ce70a506c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.270034] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1765.270034] env[63241]: value = "task-1820897" [ 1765.270034] env[63241]: _type = "Task" [ 1765.270034] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.282467] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820897, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.300307] env[63241]: DEBUG nova.compute.manager [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Received event network-changed-6be29b8b-a0d6-4346-b774-5faf878f177c {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1765.300759] env[63241]: DEBUG nova.compute.manager [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Refreshing instance network info cache due to event network-changed-6be29b8b-a0d6-4346-b774-5faf878f177c. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1765.301057] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Acquiring lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.301324] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Acquired lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.301421] env[63241]: DEBUG nova.network.neutron [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Refreshing network info cache for port 6be29b8b-a0d6-4346-b774-5faf878f177c {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1765.350730] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820893, 'name': PowerOnVM_Task} progress is 96%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.355960] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820892, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.371821] env[63241]: DEBUG nova.network.neutron [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Successfully updated port: 7710fb49-7df6-4e60-ace2-b51c25d1d8ea {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1765.425833] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8ec9a554-92c3-4eb0-bcb7-f45b227833c1 tempest-MigrationsAdminTest-801062833 tempest-MigrationsAdminTest-801062833-project-member] Lock "965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 13.875s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.654847] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54f7065-f149-42a0-8e65-2832eb0a75ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.662507] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8beee25-14d0-425b-a0f9-385cf1625259 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.697794] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf577e48-1e5c-48ee-9146-285a0a78c88b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.708131] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820896, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.709439] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37661030-5a27-40fc-8633-bf0266631c6a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.723355] env[63241]: DEBUG nova.compute.provider_tree [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1765.737304] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f0bbe9e-0c7f-4fa1-85b8-d7bd4a95e7ae tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.551s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.781796] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820897, 'name': CloneVM_Task} progress is 23%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.847340] env[63241]: DEBUG oslo_vmware.api [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820893, 'name': PowerOnVM_Task, 'duration_secs': 0.768754} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.850517] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1765.850730] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1deeb799-69c0-4f45-a32e-123dfad6c839 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance '864175e0-33f0-429f-bdf6-722d9b00da2b' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1765.859630] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820892, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.880021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "refresh_cache-46ac69f3-375c-4b60-bc33-83ad8577c4fb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.880021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "refresh_cache-46ac69f3-375c-4b60-bc33-83ad8577c4fb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.880021] env[63241]: DEBUG nova.network.neutron [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1766.062257] env[63241]: DEBUG nova.network.neutron [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updated VIF entry in instance network info cache for port 6be29b8b-a0d6-4346-b774-5faf878f177c. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1766.062707] env[63241]: DEBUG nova.network.neutron [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.204325] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820896, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.227119] env[63241]: DEBUG nova.scheduler.client.report [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1766.293424] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820897, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.360985] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820892, 'name': CloneVM_Task, 'duration_secs': 1.397228} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.361454] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Created linked-clone VM from snapshot [ 1766.362546] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dea36ea-28b7-47b6-bb9c-b5b25ba213b3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.370930] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Uploading image f4486edd-4cdd-43f5-bb2b-0002ef417a28 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1766.400166] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1766.400166] env[63241]: value = "vm-377172" [ 1766.400166] env[63241]: _type = "VirtualMachine" [ 1766.400166] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1766.400444] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bacca794-3a10-49be-a63e-8e4af87278cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.407938] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lease: (returnval){ [ 1766.407938] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52158f9a-81e0-33e4-525f-8126847ebaee" [ 1766.407938] env[63241]: _type = "HttpNfcLease" [ 1766.407938] env[63241]: } obtained for exporting VM: (result){ [ 1766.407938] env[63241]: value = "vm-377172" [ 1766.407938] env[63241]: _type = "VirtualMachine" [ 1766.407938] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1766.408206] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the lease: (returnval){ [ 1766.408206] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52158f9a-81e0-33e4-525f-8126847ebaee" [ 1766.408206] env[63241]: _type = "HttpNfcLease" [ 1766.408206] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1766.415070] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1766.415070] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52158f9a-81e0-33e4-525f-8126847ebaee" [ 1766.415070] env[63241]: _type = "HttpNfcLease" [ 1766.415070] env[63241]: } is initializing. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1766.417180] env[63241]: DEBUG nova.network.neutron [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1766.568735] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Releasing lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.568735] env[63241]: DEBUG nova.compute.manager [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Received event network-changed-f206ebca-5602-446b-aa53-e4a3d5686739 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1766.568735] env[63241]: DEBUG nova.compute.manager [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Refreshing instance network info cache due to event network-changed-f206ebca-5602-446b-aa53-e4a3d5686739. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1766.568735] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Acquiring lock "refresh_cache-037f539f-1bf1-4897-81b3-08c377b92211" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.568735] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Acquired lock "refresh_cache-037f539f-1bf1-4897-81b3-08c377b92211" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.568735] env[63241]: DEBUG nova.network.neutron [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Refreshing network info cache for port f206ebca-5602-446b-aa53-e4a3d5686739 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1766.628135] env[63241]: DEBUG nova.network.neutron [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Updating instance_info_cache with network_info: [{"id": "7710fb49-7df6-4e60-ace2-b51c25d1d8ea", "address": "fa:16:3e:63:f8:61", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7710fb49-7d", "ovs_interfaceid": "7710fb49-7df6-4e60-ace2-b51c25d1d8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.713337] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820896, 'name': CreateVM_Task, 'duration_secs': 1.086098} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.713945] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1766.714687] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.714871] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.715311] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1766.715590] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac5327ae-059b-492c-aedd-929be4985a92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.721139] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1766.721139] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524abf19-87fb-dea3-f392-079525360511" [ 1766.721139] env[63241]: _type = "Task" [ 1766.721139] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.730143] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524abf19-87fb-dea3-f392-079525360511, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.731971] env[63241]: DEBUG oslo_concurrency.lockutils [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.836s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.735142] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.145s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.738830] env[63241]: DEBUG nova.objects.instance [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lazy-loading 'resources' on Instance uuid b7378019-a572-4d4d-a82d-cee13a1b6a88 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1766.753859] env[63241]: INFO nova.scheduler.client.report [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleted allocations for instance 9d301157-6870-4452-9ae6-0d45c4338886 [ 1766.789749] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820897, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.825574] env[63241]: INFO nova.compute.manager [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Rebuilding instance [ 1766.866857] env[63241]: DEBUG nova.compute.manager [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1766.871491] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e487cc-c7ca-4cd6-84b9-69959e47ae08 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.920720] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1766.920720] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52158f9a-81e0-33e4-525f-8126847ebaee" [ 1766.920720] env[63241]: _type = "HttpNfcLease" [ 1766.920720] env[63241]: } is ready. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1766.920720] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1766.920720] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52158f9a-81e0-33e4-525f-8126847ebaee" [ 1766.920720] env[63241]: _type = "HttpNfcLease" [ 1766.920720] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1766.921724] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e9f4d2-580c-4361-b283-4655e8efbb30 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.930262] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523e9f50-21f9-33ec-86d9-f9b082ee902d/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1766.930689] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523e9f50-21f9-33ec-86d9-f9b082ee902d/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1767.046839] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4db2424c-b5ac-4314-980e-a79483c81c6c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.134407] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "refresh_cache-46ac69f3-375c-4b60-bc33-83ad8577c4fb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.134714] env[63241]: DEBUG nova.compute.manager [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Instance network_info: |[{"id": "7710fb49-7df6-4e60-ace2-b51c25d1d8ea", "address": "fa:16:3e:63:f8:61", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7710fb49-7d", "ovs_interfaceid": "7710fb49-7df6-4e60-ace2-b51c25d1d8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1767.135214] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:f8:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7710fb49-7df6-4e60-ace2-b51c25d1d8ea', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1767.143178] env[63241]: DEBUG oslo.service.loopingcall [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1767.143713] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1767.143941] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea09af47-f1dd-44ec-910e-8053a5ddc9ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.167334] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1767.167334] env[63241]: value = "task-1820899" [ 1767.167334] env[63241]: _type = "Task" [ 1767.167334] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.174806] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820899, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.233134] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524abf19-87fb-dea3-f392-079525360511, 'name': SearchDatastore_Task, 'duration_secs': 0.010482} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.234074] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.234074] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1767.234074] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.234534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.234534] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1767.236830] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b7832d9-cf09-4466-8e90-186ccb85c89e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.248940] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1767.249950] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1767.251557] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ee916ff-b307-42e5-9ef8-6b4ce029e641 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.260231] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1767.260231] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c2d13b-31a3-ec6f-c0b6-b145e390ddd0" [ 1767.260231] env[63241]: _type = "Task" [ 1767.260231] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.269919] env[63241]: DEBUG oslo_concurrency.lockutils [None req-361e938b-cf0d-4594-8db7-1ff8151e9ca9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "9d301157-6870-4452-9ae6-0d45c4338886" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.419s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.276716] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c2d13b-31a3-ec6f-c0b6-b145e390ddd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.288436] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820897, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.309335] env[63241]: DEBUG nova.network.neutron [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Updated VIF entry in instance network info cache for port f206ebca-5602-446b-aa53-e4a3d5686739. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1767.309916] env[63241]: DEBUG nova.network.neutron [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Updating instance_info_cache with network_info: [{"id": "f206ebca-5602-446b-aa53-e4a3d5686739", "address": "fa:16:3e:84:c3:dd", "network": {"id": "da181c86-2cd2-4b0b-bf7c-0c2bdbb63796", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-51851294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08e0c8d883004d0fb18507be072eb781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf206ebca-56", "ovs_interfaceid": "f206ebca-5602-446b-aa53-e4a3d5686739", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.331642] env[63241]: DEBUG nova.compute.manager [req-0c9f986a-7113-44fe-8b56-b52042265c3b req-adcd1273-3a45-4d3a-b7e4-219e26e0ea82 service nova] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Received event network-changed-7710fb49-7df6-4e60-ace2-b51c25d1d8ea {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1767.331945] env[63241]: DEBUG nova.compute.manager [req-0c9f986a-7113-44fe-8b56-b52042265c3b req-adcd1273-3a45-4d3a-b7e4-219e26e0ea82 service nova] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Refreshing instance network info cache due to event network-changed-7710fb49-7df6-4e60-ace2-b51c25d1d8ea. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1767.332194] env[63241]: DEBUG oslo_concurrency.lockutils [req-0c9f986a-7113-44fe-8b56-b52042265c3b req-adcd1273-3a45-4d3a-b7e4-219e26e0ea82 service nova] Acquiring lock "refresh_cache-46ac69f3-375c-4b60-bc33-83ad8577c4fb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.332352] env[63241]: DEBUG oslo_concurrency.lockutils [req-0c9f986a-7113-44fe-8b56-b52042265c3b req-adcd1273-3a45-4d3a-b7e4-219e26e0ea82 service nova] Acquired lock "refresh_cache-46ac69f3-375c-4b60-bc33-83ad8577c4fb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.332527] env[63241]: DEBUG nova.network.neutron [req-0c9f986a-7113-44fe-8b56-b52042265c3b req-adcd1273-3a45-4d3a-b7e4-219e26e0ea82 service nova] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Refreshing network info cache for port 7710fb49-7df6-4e60-ace2-b51c25d1d8ea {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1767.385516] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1767.386488] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d16ba049-e1fa-4be8-b368-85e40fc41541 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.394400] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1767.394400] env[63241]: value = "task-1820900" [ 1767.394400] env[63241]: _type = "Task" [ 1767.394400] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.408638] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1767.408978] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1767.409884] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6b000a-43c6-4473-87da-855bec112d2f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.422688] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1767.423789] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7ab1e1b-b86e-4734-8fbe-1a020132d8d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.525239] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b961a0ed-0310-4e91-b6a7-152e308a7037 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.533940] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77e0eca-c75b-4c24-9288-86b4723c2c24 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.540262] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1767.540532] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1767.540718] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleting the datastore file [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1767.542619] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cbf1845d-3cf2-4c31-9b98-1ac3f916a758 
{{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.573573] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353ac781-a011-4d15-ab23-5db6cc7369f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.578061] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1767.578061] env[63241]: value = "task-1820902" [ 1767.578061] env[63241]: _type = "Task" [ 1767.578061] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.586944] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d208a5-e32c-45c5-add3-15c8c5d79bf3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.595056] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.606243] env[63241]: DEBUG nova.compute.provider_tree [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1767.679543] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820899, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.775777] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c2d13b-31a3-ec6f-c0b6-b145e390ddd0, 'name': SearchDatastore_Task, 'duration_secs': 0.021705} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.776866] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89110c2b-3a1f-4147-9f0e-f01263ecbd19 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.787766] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1767.787766] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5272fc68-271e-858f-cc9c-4e16b93c3a8e" [ 1767.787766] env[63241]: _type = "Task" [ 1767.787766] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.790045] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820897, 'name': CloneVM_Task, 'duration_secs': 2.514142} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.793482] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Created linked-clone VM from snapshot [ 1767.794363] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319cde2e-cdb7-4904-95eb-b7d559043ca6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.802506] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5272fc68-271e-858f-cc9c-4e16b93c3a8e, 'name': SearchDatastore_Task, 'duration_secs': 0.011013} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.806673] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.806673] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 037f539f-1bf1-4897-81b3-08c377b92211/037f539f-1bf1-4897-81b3-08c377b92211.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1767.806826] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Uploading image 509bfeca-5406-4a2d-b9c1-64cb54f16cd4 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1767.809328] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3728663-24db-4ef6-acff-1d7e48083862 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.812504] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Releasing lock "refresh_cache-037f539f-1bf1-4897-81b3-08c377b92211" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1767.812615] env[63241]: DEBUG nova.compute.manager [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Received event network-changed-a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1767.812912] env[63241]: DEBUG nova.compute.manager [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Refreshing instance network info cache due to event network-changed-a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1767.813237] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Acquiring lock "refresh_cache-e62f49f0-370d-4b5d-ab43-72e0e6238432" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.813447] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Acquired lock "refresh_cache-e62f49f0-370d-4b5d-ab43-72e0e6238432" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.813667] env[63241]: DEBUG nova.network.neutron [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Refreshing network info cache for port a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1767.817081] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1767.817081] env[63241]: value = "task-1820903" [ 1767.817081] env[63241]: _type = "Task" [ 1767.817081] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.828397] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820903, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.834718] env[63241]: DEBUG oslo_vmware.rw_handles [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1767.834718] env[63241]: value = "vm-377176" [ 1767.834718] env[63241]: _type = "VirtualMachine" [ 1767.834718] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1767.837523] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b9ffb7b0-1317-4493-b4bb-e1206fe8a303 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.851843] env[63241]: DEBUG oslo_vmware.rw_handles [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lease: (returnval){ [ 1767.851843] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527be854-ff81-cda4-5a1e-f6b819a67c1b" [ 1767.851843] env[63241]: _type = "HttpNfcLease" [ 1767.851843] env[63241]: } obtained for exporting VM: (result){ [ 1767.851843] env[63241]: value = "vm-377176" [ 1767.851843] env[63241]: _type = "VirtualMachine" [ 1767.851843] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1767.851843] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the lease: (returnval){ [ 1767.851843] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527be854-ff81-cda4-5a1e-f6b819a67c1b" [ 1767.851843] env[63241]: _type = "HttpNfcLease" [ 1767.851843] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1767.860358] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1767.860358] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527be854-ff81-cda4-5a1e-f6b819a67c1b" [ 1767.860358] env[63241]: _type = "HttpNfcLease" [ 1767.860358] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1768.092736] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180508} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.093097] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1768.093304] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1768.093485] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1768.110577] env[63241]: DEBUG nova.scheduler.client.report [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1768.180500] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820899, 'name': CreateVM_Task, 'duration_secs': 0.569749} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.180623] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1768.181537] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1768.181767] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1768.182463] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1768.182912] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65b7538d-e5b6-4b15-bf80-57a3106f277b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.189330] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1768.189330] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e36a38-ef8b-6a2f-5a2a-1c79192558a4" [ 1768.189330] env[63241]: _type = "Task" [ 1768.189330] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.201427] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e36a38-ef8b-6a2f-5a2a-1c79192558a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.283538] env[63241]: DEBUG nova.network.neutron [req-0c9f986a-7113-44fe-8b56-b52042265c3b req-adcd1273-3a45-4d3a-b7e4-219e26e0ea82 service nova] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Updated VIF entry in instance network info cache for port 7710fb49-7df6-4e60-ace2-b51c25d1d8ea. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1768.284396] env[63241]: DEBUG nova.network.neutron [req-0c9f986a-7113-44fe-8b56-b52042265c3b req-adcd1273-3a45-4d3a-b7e4-219e26e0ea82 service nova] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Updating instance_info_cache with network_info: [{"id": "7710fb49-7df6-4e60-ace2-b51c25d1d8ea", "address": "fa:16:3e:63:f8:61", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7710fb49-7d", "ovs_interfaceid": "7710fb49-7df6-4e60-ace2-b51c25d1d8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.333484] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820903, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.347216] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "864175e0-33f0-429f-bdf6-722d9b00da2b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.348107] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.348919] env[63241]: DEBUG nova.compute.manager [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Going to confirm migration 4 {{(pid=63241) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1768.365336] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1768.365336] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527be854-ff81-cda4-5a1e-f6b819a67c1b" [ 1768.365336] env[63241]: _type = "HttpNfcLease" [ 1768.365336] env[63241]: } is ready. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1768.366263] env[63241]: DEBUG oslo_vmware.rw_handles [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1768.366263] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527be854-ff81-cda4-5a1e-f6b819a67c1b" [ 1768.366263] env[63241]: _type = "HttpNfcLease" [ 1768.366263] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1768.373038] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2f87ac-a852-4a7b-b29a-71c777c73727 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.378608] env[63241]: DEBUG oslo_vmware.rw_handles [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e5ab0f-4d1e-3a8f-256a-1af18fdaa37a/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1768.378900] env[63241]: DEBUG oslo_vmware.rw_handles [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e5ab0f-4d1e-3a8f-256a-1af18fdaa37a/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1768.561740] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-45dd3386-b6ce-4d5a-bc17-dc62c3e0f512 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.627019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.892s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.630176] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.746s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.630176] env[63241]: DEBUG nova.objects.instance [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'resources' on Instance uuid 6b4debb5-5a83-45f7-bcf2-36a10f95f644 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1768.655626] env[63241]: INFO nova.scheduler.client.report [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 
tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Deleted allocations for instance b7378019-a572-4d4d-a82d-cee13a1b6a88 [ 1768.701138] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e36a38-ef8b-6a2f-5a2a-1c79192558a4, 'name': SearchDatastore_Task, 'duration_secs': 0.065515} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.702346] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.702531] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1768.702852] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1768.703115] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1768.703476] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1768.703631] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad4e1950-7ff9-4bb8-9c11-215a197c5ad0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.714236] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1768.714560] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1768.715764] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53ecd76a-7a6e-4b05-a2c4-cc1720de3ea6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.722265] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1768.722265] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d2bbca-0e36-1016-0446-6e3b9192b65d" [ 1768.722265] env[63241]: _type = "Task" [ 1768.722265] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.732909] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d2bbca-0e36-1016-0446-6e3b9192b65d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.789371] env[63241]: DEBUG oslo_concurrency.lockutils [req-0c9f986a-7113-44fe-8b56-b52042265c3b req-adcd1273-3a45-4d3a-b7e4-219e26e0ea82 service nova] Releasing lock "refresh_cache-46ac69f3-375c-4b60-bc33-83ad8577c4fb" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.830813] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541058} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.831178] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 037f539f-1bf1-4897-81b3-08c377b92211/037f539f-1bf1-4897-81b3-08c377b92211.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1768.831415] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1768.831971] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce3b1223-48a2-4840-8a07-2d49955f84e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.842730] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1768.842730] env[63241]: value = "task-1820905" [ 1768.842730] env[63241]: _type = "Task" [ 1768.842730] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.852164] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820905, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.951780] env[63241]: DEBUG nova.network.neutron [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Updated VIF entry in instance network info cache for port a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1768.953099] env[63241]: DEBUG nova.network.neutron [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Updating instance_info_cache with network_info: [{"id": "a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec", "address": "fa:16:3e:fe:33:68", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4ff42d0-66", "ovs_interfaceid": "a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.049216] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.049216] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.049216] env[63241]: DEBUG nova.network.neutron [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1769.049216] env[63241]: DEBUG nova.objects.instance [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lazy-loading 'info_cache' on Instance uuid 864175e0-33f0-429f-bdf6-722d9b00da2b {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1769.167652] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d0c7e519-3562-450a-806e-e3a1529df9cb tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "b7378019-a572-4d4d-a82d-cee13a1b6a88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
:: held 10.411s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.233715] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d2bbca-0e36-1016-0446-6e3b9192b65d, 'name': SearchDatastore_Task, 'duration_secs': 0.009902} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.238529] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aa42abc-40c0-484a-8d8c-3c2afceb400f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.251047] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1769.251047] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a6e221-fd59-6e5c-809d-735217ad6129" [ 1769.251047] env[63241]: _type = "Task" [ 1769.251047] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.262434] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a6e221-fd59-6e5c-809d-735217ad6129, 'name': SearchDatastore_Task} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.266330] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1769.266763] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 46ac69f3-375c-4b60-bc33-83ad8577c4fb/46ac69f3-375c-4b60-bc33-83ad8577c4fb.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1769.267637] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eef36679-8e36-4f20-a572-9b6b2be8851f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.274989] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1769.274989] env[63241]: value = "task-1820906" [ 1769.274989] env[63241]: _type = "Task" [ 1769.274989] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.285381] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820906, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.355694] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10075} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.359777] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1769.361112] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4965c0cf-6d70-4e10-9490-6cace7c41a75 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.390695] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 037f539f-1bf1-4897-81b3-08c377b92211/037f539f-1bf1-4897-81b3-08c377b92211.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1769.394638] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1fffddb-87d1-4ba1-8711-699a85f17fdc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.418848] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1769.418848] env[63241]: value = "task-1820907" [ 1769.418848] env[63241]: _type = "Task" [ 1769.418848] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.430487] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820907, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.456089] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Releasing lock "refresh_cache-e62f49f0-370d-4b5d-ab43-72e0e6238432" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1769.456413] env[63241]: DEBUG nova.compute.manager [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Received event network-vif-plugged-7710fb49-7df6-4e60-ace2-b51c25d1d8ea {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1769.456691] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Acquiring lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.458307] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.458307] env[63241]: DEBUG oslo_concurrency.lockutils [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] Lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.458307] env[63241]: DEBUG nova.compute.manager [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] No waiting events found dispatching network-vif-plugged-7710fb49-7df6-4e60-ace2-b51c25d1d8ea {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1769.458307] env[63241]: WARNING nova.compute.manager [req-1997e22a-9c80-4164-a5e5-5cc89d485870 req-e9baf7b9-3bbe-4fc8-b0ce-b7493536a0bc service nova] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Received unexpected event network-vif-plugged-7710fb49-7df6-4e60-ace2-b51c25d1d8ea for instance with vm_state building and task_state spawning. 
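The paired "Acquiring lock ... / Lock ... acquired ... :: waited Ns / Lock ... released ... :: held Ns" DEBUG lines that recur throughout this section (the refresh_cache-* locks, compute_resources, the per-instance event locks) come from oslo.concurrency's lockutils wrappers, which time both the wait for and the hold of a named internal semaphore. A minimal sketch of the two forms that produce these lines; the lock name and the no-op bodies below are placeholders, not taken from this log:

```python
# Sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ..." / "acquired ... waited Ns" / "released ... held Ns"
# DEBUG lines above. Lock name and guarded callables are illustrative only.
from oslo_concurrency import lockutils


@lockutils.synchronized("refresh_cache-<instance-uuid>")
def refresh_network_cache():
    # Runs only while the named internal semaphore is held; the decorator's
    # wrapper logs how long the caller waited and how long the lock was held.
    pass


def refresh_network_cache_explicit():
    # The context-manager form emits the matching
    # "Acquiring" / "Acquired" / "Releasing lock" lines seen for the
    # refresh_cache-* locks in this section.
    with lockutils.lock("refresh_cache-<instance-uuid>"):
        pass
```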
[ 1769.532061] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de09985-0978-4bd1-870c-cb5963b817ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.543041] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69b3592-ea26-449b-a2af-c2bbe28d7204 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.592481] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a45741-4f85-4581-8401-07803bc54ec5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.606496] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694f7d3d-58f4-4a44-8f84-848058c9b32f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.625226] env[63241]: DEBUG nova.compute.provider_tree [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1769.787695] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820906, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.931048] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820907, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.130211] env[63241]: DEBUG nova.scheduler.client.report [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1770.295679] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820906, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544021} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.296257] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 46ac69f3-375c-4b60-bc33-83ad8577c4fb/46ac69f3-375c-4b60-bc33-83ad8577c4fb.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1770.296602] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1770.296926] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0d473f5-4240-4c41-bc2e-8edcdae83448 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.311108] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1770.311108] env[63241]: value = "task-1820908" [ 1770.311108] env[63241]: _type = "Task" [ 1770.311108] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.321475] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820908, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.430532] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820907, 'name': ReconfigVM_Task, 'duration_secs': 0.584048} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.430833] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 037f539f-1bf1-4897-81b3-08c377b92211/037f539f-1bf1-4897-81b3-08c377b92211.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1770.431574] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5b126bc-9c47-4f3e-8b33-edf6c8bbf45a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.437957] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1770.437957] env[63241]: value = "task-1820909" [ 1770.437957] env[63241]: _type = "Task" [ 1770.437957] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.450612] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820909, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.524403] env[63241]: DEBUG nova.network.neutron [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance_info_cache with network_info: [{"id": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "address": "fa:16:3e:98:e9:3c", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe5471bd-3b", "ovs_interfaceid": "be5471bd-3bc7-4ef4-9ea6-be69b0420644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.636476] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.007s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.668709] env[63241]: INFO nova.scheduler.client.report [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted allocations for instance 6b4debb5-5a83-45f7-bcf2-36a10f95f644 [ 1770.825687] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820908, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12293} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.825687] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1770.826207] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606202fd-3415-4302-9dc6-b3925d1fd075 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.850391] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 46ac69f3-375c-4b60-bc33-83ad8577c4fb/46ac69f3-375c-4b60-bc33-83ad8577c4fb.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1770.850728] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-391fa8e7-aefb-4b93-b562-8b75e307592a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.870993] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1770.870993] env[63241]: value = "task-1820910" [ 1770.870993] env[63241]: _type = "Task" [ 1770.870993] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.880037] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820910, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.948795] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820909, 'name': Rename_Task, 'duration_secs': 0.223335} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.949252] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1770.949565] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9384d639-558e-4346-9a14-4f8c745c927e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.956549] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1770.956549] env[63241]: value = "task-1820911" [ 1770.956549] env[63241]: _type = "Task" [ 1770.956549] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.964891] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820911, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.027518] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-864175e0-33f0-429f-bdf6-722d9b00da2b" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.028082] env[63241]: DEBUG nova.objects.instance [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lazy-loading 'migration_context' on Instance uuid 864175e0-33f0-429f-bdf6-722d9b00da2b {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1771.177772] env[63241]: DEBUG oslo_concurrency.lockutils [None req-111eaae1-b13c-4cc9-9316-e9c91daa4f7c tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "6b4debb5-5a83-45f7-bcf2-36a10f95f644" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.979s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.383689] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820910, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.468472] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820911, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.530936] env[63241]: DEBUG nova.objects.base [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Object Instance<864175e0-33f0-429f-bdf6-722d9b00da2b> lazy-loaded attributes: info_cache,migration_context {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1771.531919] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3d1a29-b3d0-43f9-8af3-831db3395cc5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.551693] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71d1dfb4-6e26-4e44-9319-f7e37527e088 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.559169] env[63241]: DEBUG oslo_vmware.api [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1771.559169] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52505d35-c3a7-262f-945f-4f3cf8735e1e" [ 1771.559169] env[63241]: _type = "Task" [ 1771.559169] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.570188] env[63241]: DEBUG oslo_vmware.api [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52505d35-c3a7-262f-945f-4f3cf8735e1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.884858] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820910, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.969371] env[63241]: DEBUG oslo_vmware.api [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1820911, 'name': PowerOnVM_Task, 'duration_secs': 0.667733} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.969668] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1771.969895] env[63241]: INFO nova.compute.manager [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Took 9.33 seconds to spawn the instance on the hypervisor. 
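Every vCenter operation in this stretch of the log (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same invoke-then-poll shape: the driver calls a *_Task method, receives a task reference, and oslo.vmware polls it, logging "progress is N%" until it reports success. A rough sketch of that pattern using oslo.vmware's public session API; the endpoint, credentials, poll interval and vm_ref are placeholders, not values from this log:

```python
# Conceptual sketch of the invoke/poll pattern behind the
# "Invoking ... / Waiting for the task ... / progress is N% /
# completed successfully" lines above. All values are placeholders.
from oslo_vmware import api


def power_on(session, vm_ref):
    """Invoke a vCenter *_Task method and block until it finishes.

    vm_ref is assumed to be a VirtualMachine managed-object reference
    obtained elsewhere (e.g. from a PropertyCollector query).
    """
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # wait_for_task() polls the task, producing the "progress is N%"
    # DEBUG lines, and returns the final task info (or raises on error).
    return session.wait_for_task(task)


# Session construction with a placeholder endpoint and credentials.
session = api.VMwareAPISession(
    "vcenter.example.org", "administrator@vsphere.local", "secret",
    api_retry_count=10, task_poll_interval=0.5)
```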
[ 1771.970091] env[63241]: DEBUG nova.compute.manager [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1771.970916] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0c2898-21a1-4798-b4a9-5cd5a29dc02b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.070727] env[63241]: DEBUG oslo_vmware.api [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52505d35-c3a7-262f-945f-4f3cf8735e1e, 'name': SearchDatastore_Task, 'duration_secs': 0.009833} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.071052] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.071347] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.382880] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820910, 'name': ReconfigVM_Task, 'duration_secs': 1.239709} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.383181] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 46ac69f3-375c-4b60-bc33-83ad8577c4fb/46ac69f3-375c-4b60-bc33-83ad8577c4fb.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1772.383825] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5e0996e-e3c3-4f4b-bff0-fef95561bc84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.390068] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1772.390068] env[63241]: value = "task-1820912" [ 1772.390068] env[63241]: _type = "Task" [ 1772.390068] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.398869] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820912, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.489777] env[63241]: INFO nova.compute.manager [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Took 25.56 seconds to build instance. [ 1772.783732] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f921a8dc-4558-4c67-b270-2c28e3c649f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.792122] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6261c31-c164-425d-9283-e96d6d3225bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.824155] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bafc91d-917d-41e4-b758-47020125dd74 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.832861] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1e7114-9342-4575-8440-7bd1780e585a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.848837] env[63241]: DEBUG nova.compute.provider_tree [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.902095] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820912, 'name': Rename_Task, 'duration_secs': 0.214312} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.902488] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1772.902766] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d922e4c0-628f-48e2-8ca2-71d9a5ef8c6e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.909492] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1772.909492] env[63241]: value = "task-1820913" [ 1772.909492] env[63241]: _type = "Task" [ 1772.909492] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.918535] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820913, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.992318] env[63241]: DEBUG oslo_concurrency.lockutils [None req-92a20e90-f159-4a58-a7a2-0322907d6d0a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.072s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.352599] env[63241]: DEBUG nova.scheduler.client.report [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1773.420846] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820913, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.920542] env[63241]: DEBUG oslo_vmware.api [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820913, 'name': PowerOnVM_Task, 'duration_secs': 0.593156} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.920803] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1773.921076] env[63241]: INFO nova.compute.manager [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Took 8.89 seconds to spawn the instance on the hypervisor. [ 1773.921218] env[63241]: DEBUG nova.compute.manager [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1773.922054] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2974a7b4-a4e8-45fd-b435-10e7af5b13a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.364079] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.292s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.440044] env[63241]: INFO nova.compute.manager [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Took 26.69 seconds to build instance. 
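The inventory dictionaries reported for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b translate into placement capacity as (total - reserved) * allocation_ratio per resource class: 48 * 4.0 = 192 VCPU, (196590 - 512) * 1.0 = 196078 MB of RAM and 400 * 1.0 = 400 GB of disk, with max_unit bounding what any single allocation may request. A quick check using the inventory exactly as logged:

```python
# Capacity math for the inventory reported above; placement treats usable
# capacity as (total - reserved) * allocation_ratio per resource class.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, per-allocation cap={inv['max_unit']}")
# VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400
```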
[ 1774.923143] env[63241]: INFO nova.scheduler.client.report [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted allocation for migration 4888f213-32fb-497d-9453-837f8f1b279f [ 1774.942702] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c47c6bc1-7baf-471e-9f47-a99f44a0eeb6 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.204s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.429556] env[63241]: DEBUG oslo_concurrency.lockutils [None req-51c38248-ba1d-4214-bc95-565cb72e9ae9 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.081s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.190059] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1777.190450] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1777.190607] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1777.190811] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1777.190934] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1777.191094] env[63241]: 
DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1777.191312] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1777.191476] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1777.191645] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1777.191808] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1777.191982] env[63241]: DEBUG nova.virt.hardware [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1777.194102] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe0b73f-8739-4c9c-a9ac-921dae54466f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.203827] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523e9f50-21f9-33ec-86d9-f9b082ee902d/disk-0.vmdk. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1777.205241] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8c9f1d-3c18-4f94-a5f5-81ac6785d1b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.210010] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b3e97b-ed16-44a0-ae8a-dbc24db7f948 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.224197] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:74:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6b9cb89-6a29-44b5-91b1-0591266c582b', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1777.232367] env[63241]: DEBUG oslo.service.loopingcall [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1777.234166] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1777.234444] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523e9f50-21f9-33ec-86d9-f9b082ee902d/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1777.234601] env[63241]: ERROR oslo_vmware.rw_handles [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523e9f50-21f9-33ec-86d9-f9b082ee902d/disk-0.vmdk due to incomplete transfer. [ 1777.234808] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b5fadad-90d1-4d46-8d25-73ff8b6e596b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.248643] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8a2b8deb-c583-4e9d-9d02-949402661232 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.256755] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1777.256755] env[63241]: value = "task-1820914" [ 1777.256755] env[63241]: _type = "Task" [ 1777.256755] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.258025] env[63241]: DEBUG oslo_vmware.rw_handles [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523e9f50-21f9-33ec-86d9-f9b082ee902d/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1777.258226] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Uploaded image f4486edd-4cdd-43f5-bb2b-0002ef417a28 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1777.260774] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1777.264055] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-db9ae2b7-843a-4a00-a5b6-9ae4d3df805e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.271624] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820914, 'name': CreateVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.273120] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1777.273120] env[63241]: value = "task-1820915" [ 1777.273120] env[63241]: _type = "Task" [ 1777.273120] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.281878] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820915, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.770247] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820914, 'name': CreateVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.782474] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820915, 'name': Destroy_Task} progress is 33%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.207642] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.207642] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.211708] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquiring lock "91b65576-47be-4a92-a6fd-8380532c8e1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.211708] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.269905] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820914, 'name': CreateVM_Task} progress is 15%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.283318] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820915, 'name': Destroy_Task, 'duration_secs': 0.625785} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.283642] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Destroyed the VM [ 1778.283865] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1778.284144] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-856abc90-d2b4-4d6f-b1df-44d589b67ae2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.291596] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1778.291596] env[63241]: value = "task-1820916" [ 1778.291596] env[63241]: _type = "Task" [ 1778.291596] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.300384] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820916, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.635940] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "12b99b2b-56f0-4ce9-8897-f429c2084f38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.635940] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.711405] env[63241]: DEBUG nova.compute.manager [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1778.714877] env[63241]: DEBUG nova.compute.manager [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1778.772930] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820914, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.802309] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820916, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.135798] env[63241]: DEBUG nova.compute.manager [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1779.245591] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.245892] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.247606] env[63241]: INFO nova.compute.claims [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1779.254631] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.276379] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820914, 'name': CreateVM_Task, 'duration_secs': 1.650406} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.276737] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1779.277415] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.277617] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.277941] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1779.278545] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2714026b-8d0a-4969-9208-93782533cba3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.286952] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1779.286952] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52da7418-519b-92ef-8378-54d992fa667d" [ 1779.286952] env[63241]: _type = "Task" [ 1779.286952] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.298081] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52da7418-519b-92ef-8378-54d992fa667d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.309795] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820916, 'name': RemoveSnapshot_Task, 'duration_secs': 0.664493} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.310056] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1779.310491] env[63241]: DEBUG nova.compute.manager [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1779.311335] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e881f9-4b36-423a-8a97-c6c80f77e2c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.524105] env[63241]: DEBUG oslo_vmware.rw_handles [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e5ab0f-4d1e-3a8f-256a-1af18fdaa37a/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1779.524105] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b215bf5-c9a8-4feb-8403-b1649c27f4fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.531585] env[63241]: DEBUG oslo_vmware.rw_handles [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e5ab0f-4d1e-3a8f-256a-1af18fdaa37a/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1779.531948] env[63241]: ERROR oslo_vmware.rw_handles [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e5ab0f-4d1e-3a8f-256a-1af18fdaa37a/disk-0.vmdk due to incomplete transfer. [ 1779.532335] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-658c1b05-ab26-463c-91d3-b95f838c732e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.539859] env[63241]: DEBUG oslo_vmware.rw_handles [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e5ab0f-4d1e-3a8f-256a-1af18fdaa37a/disk-0.vmdk. 
{{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1779.540641] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Uploaded image 509bfeca-5406-4a2d-b9c1-64cb54f16cd4 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1779.542255] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1779.542646] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ef50bb6d-f15c-40fb-b294-988ec6e2470c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.551277] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1779.551277] env[63241]: value = "task-1820917" [ 1779.551277] env[63241]: _type = "Task" [ 1779.551277] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.560757] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820917, 'name': Destroy_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.656154] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.772611] env[63241]: DEBUG oslo_concurrency.lockutils [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "864175e0-33f0-429f-bdf6-722d9b00da2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.773273] env[63241]: DEBUG oslo_concurrency.lockutils [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.773709] env[63241]: DEBUG oslo_concurrency.lockutils [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "864175e0-33f0-429f-bdf6-722d9b00da2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.773956] env[63241]: DEBUG oslo_concurrency.lockutils [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.774215] env[63241]: DEBUG oslo_concurrency.lockutils [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.776464] env[63241]: INFO nova.compute.manager [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Terminating instance [ 1779.778916] env[63241]: DEBUG nova.compute.manager [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1779.779154] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1779.780148] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7122d3-7255-496b-aaeb-a4b4fd8669b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.795957] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1779.795957] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8721d28-90a8-4f0f-8396-8232cbeb8dd4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.804477] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52da7418-519b-92ef-8378-54d992fa667d, 'name': SearchDatastore_Task, 'duration_secs': 0.011156} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.805856] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.806420] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1779.806732] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.807018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.807236] env[63241]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1779.807661] env[63241]: DEBUG oslo_vmware.api [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1779.807661] env[63241]: value = "task-1820918" [ 1779.807661] env[63241]: _type = "Task" [ 1779.807661] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.807938] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aea24def-90df-45e0-9cab-0d7ae3ec3740 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.826819] env[63241]: DEBUG oslo_vmware.api [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820918, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.828583] env[63241]: INFO nova.compute.manager [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Shelve offloading [ 1779.832019] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1779.832019] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1779.832019] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1779.832019] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8f7db3f-d7e9-4c00-bc98-b59ce74e3615 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.834143] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dda5398a-53cf-4a95-9b5a-ecd9de5ffeca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.839791] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1779.839791] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521eb894-3405-69ed-7b23-69ffbb120f26" [ 1779.839791] env[63241]: _type = "Task" [ 1779.839791] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.846045] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1779.846045] env[63241]: value = "task-1820919" [ 1779.846045] env[63241]: _type = "Task" [ 1779.846045] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.854042] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521eb894-3405-69ed-7b23-69ffbb120f26, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.860959] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1779.861286] env[63241]: DEBUG nova.compute.manager [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1779.862237] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82de6724-5f8d-402f-99c2-17a790717d62 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.871103] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "49d350ff-4932-4759-a3fa-53274c484ae6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.872362] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.874967] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.875241] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.875480] env[63241]: DEBUG nova.network.neutron [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1779.876770] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.877056] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.877627] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.877687] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.877950] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.880304] env[63241]: INFO nova.compute.manager [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Terminating instance [ 1779.882133] env[63241]: DEBUG nova.compute.manager [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1779.884333] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1779.884333] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd98912-db9a-42d8-8530-9b47f81a633d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.894173] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1779.894761] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a14fa97f-ebc3-4d38-9eb8-bad990f67cfd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.902689] env[63241]: DEBUG oslo_vmware.api [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1779.902689] env[63241]: value = "task-1820920" [ 1779.902689] env[63241]: _type = "Task" [ 1779.902689] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.918524] env[63241]: DEBUG oslo_vmware.api [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820920, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.042515] env[63241]: DEBUG nova.compute.manager [req-0c0d6cf9-afe3-41e8-9e60-b770d741c1e8 req-5f4dc291-7b08-48f9-8fa8-bce342a1eef3 service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Received event network-changed-f206ebca-5602-446b-aa53-e4a3d5686739 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1780.043115] env[63241]: DEBUG nova.compute.manager [req-0c0d6cf9-afe3-41e8-9e60-b770d741c1e8 req-5f4dc291-7b08-48f9-8fa8-bce342a1eef3 service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Refreshing instance network info cache due to event network-changed-f206ebca-5602-446b-aa53-e4a3d5686739. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1780.043426] env[63241]: DEBUG oslo_concurrency.lockutils [req-0c0d6cf9-afe3-41e8-9e60-b770d741c1e8 req-5f4dc291-7b08-48f9-8fa8-bce342a1eef3 service nova] Acquiring lock "refresh_cache-037f539f-1bf1-4897-81b3-08c377b92211" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.043716] env[63241]: DEBUG oslo_concurrency.lockutils [req-0c0d6cf9-afe3-41e8-9e60-b770d741c1e8 req-5f4dc291-7b08-48f9-8fa8-bce342a1eef3 service nova] Acquired lock "refresh_cache-037f539f-1bf1-4897-81b3-08c377b92211" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.043960] env[63241]: DEBUG nova.network.neutron [req-0c0d6cf9-afe3-41e8-9e60-b770d741c1e8 req-5f4dc291-7b08-48f9-8fa8-bce342a1eef3 service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Refreshing network info cache for port f206ebca-5602-446b-aa53-e4a3d5686739 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1780.058960] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820917, 'name': Destroy_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.320104] env[63241]: DEBUG oslo_vmware.api [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820918, 'name': PowerOffVM_Task, 'duration_secs': 0.277862} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.322687] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1780.322894] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1780.323354] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31f815c2-3bb0-49d2-9fbb-372899f1f4bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.350252] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521eb894-3405-69ed-7b23-69ffbb120f26, 'name': SearchDatastore_Task, 'duration_secs': 0.012064} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.351051] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c78bfe3f-841d-49cd-9888-2bb4908a9fe7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.356028] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1780.356028] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5271988f-fe11-add7-9db9-c75d5e8def96" [ 1780.356028] env[63241]: _type = "Task" [ 1780.356028] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.365605] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5271988f-fe11-add7-9db9-c75d5e8def96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.380991] env[63241]: DEBUG nova.compute.manager [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1780.417189] env[63241]: DEBUG oslo_vmware.api [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820920, 'name': PowerOffVM_Task, 'duration_secs': 0.228928} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.419877] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1780.419877] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1780.420076] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abec0902-81ce-47ad-abc4-c11a9fca87af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.510576] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abda043-1768-408f-818b-a1af78af760a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.518613] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e9c2d1-ff42-4d6c-af16-9b775e0b07ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.555780] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d277886-4006-489d-bd22-48375d3b7044 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.563841] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820917, 'name': Destroy_Task, 'duration_secs': 0.746376} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.566477] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Destroyed the VM [ 1780.566477] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1780.567131] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b02b15b3-c065-42a0-b401-5b1c00baf157 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.570026] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006c9fd0-fea9-4444-a5a6-cf8b95a088ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.588265] env[63241]: DEBUG nova.compute.provider_tree [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1780.591996] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1780.591996] env[63241]: value = "task-1820923" [ 1780.591996] env[63241]: _type = "Task" [ 1780.591996] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.600069] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.600568] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.605016] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820923, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.653554] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1780.653554] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1780.653752] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleting the datastore file [datastore1] 864175e0-33f0-429f-bdf6-722d9b00da2b {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1780.653892] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0688e3f6-705d-48fd-929e-09b481cfd399 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.662731] env[63241]: DEBUG oslo_vmware.api [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1780.662731] env[63241]: value = "task-1820924" [ 1780.662731] env[63241]: _type = "Task" [ 1780.662731] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.673275] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1780.673275] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1780.673275] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleting the datastore file [datastore1] 46ac69f3-375c-4b60-bc33-83ad8577c4fb {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1780.674206] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b64587f9-9664-425e-9bd7-d4bf109a7b60 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.679017] env[63241]: DEBUG oslo_vmware.api [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820924, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.682468] env[63241]: DEBUG oslo_vmware.api [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1780.682468] env[63241]: value = "task-1820925" [ 1780.682468] env[63241]: _type = "Task" [ 1780.682468] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.691773] env[63241]: DEBUG oslo_vmware.api [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820925, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.734263] env[63241]: DEBUG nova.network.neutron [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating instance_info_cache with network_info: [{"id": "24131a23-55e1-4bd6-8813-5768da05438f", "address": "fa:16:3e:fa:8e:d4", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24131a23-55", "ovs_interfaceid": "24131a23-55e1-4bd6-8813-5768da05438f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.801216] env[63241]: DEBUG nova.network.neutron [req-0c0d6cf9-afe3-41e8-9e60-b770d741c1e8 req-5f4dc291-7b08-48f9-8fa8-bce342a1eef3 service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Updated VIF entry in instance network info cache for port f206ebca-5602-446b-aa53-e4a3d5686739. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1780.801565] env[63241]: DEBUG nova.network.neutron [req-0c0d6cf9-afe3-41e8-9e60-b770d741c1e8 req-5f4dc291-7b08-48f9-8fa8-bce342a1eef3 service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Updating instance_info_cache with network_info: [{"id": "f206ebca-5602-446b-aa53-e4a3d5686739", "address": "fa:16:3e:84:c3:dd", "network": {"id": "da181c86-2cd2-4b0b-bf7c-0c2bdbb63796", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-51851294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08e0c8d883004d0fb18507be072eb781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf206ebca-56", "ovs_interfaceid": "f206ebca-5602-446b-aa53-e4a3d5686739", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.869026] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5271988f-fe11-add7-9db9-c75d5e8def96, 'name': SearchDatastore_Task, 'duration_secs': 0.009672} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.869026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.869026] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e/b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1780.869245] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9de29e9d-3b64-4786-b01d-52dfeb35dd0a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.875882] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1780.875882] env[63241]: value = "task-1820926" [ 1780.875882] env[63241]: _type = "Task" [ 1780.875882] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.884675] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820926, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.899932] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.094783] env[63241]: DEBUG nova.scheduler.client.report [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1781.110656] env[63241]: DEBUG nova.compute.manager [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1781.113698] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820923, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.175055] env[63241]: DEBUG oslo_vmware.api [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162987} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.175335] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1781.175552] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1781.175740] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1781.175918] env[63241]: INFO nova.compute.manager [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1781.176189] env[63241]: DEBUG oslo.service.loopingcall [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1781.176392] env[63241]: DEBUG nova.compute.manager [-] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1781.176485] env[63241]: DEBUG nova.network.neutron [-] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1781.194220] env[63241]: DEBUG oslo_vmware.api [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1820925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190567} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.194322] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1781.194469] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1781.195072] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1781.195072] env[63241]: INFO nova.compute.manager [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1781.195072] env[63241]: DEBUG oslo.service.loopingcall [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1781.195272] env[63241]: DEBUG nova.compute.manager [-] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1781.195973] env[63241]: DEBUG nova.network.neutron [-] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1781.238383] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.304614] env[63241]: DEBUG oslo_concurrency.lockutils [req-0c0d6cf9-afe3-41e8-9e60-b770d741c1e8 req-5f4dc291-7b08-48f9-8fa8-bce342a1eef3 service nova] Releasing lock "refresh_cache-037f539f-1bf1-4897-81b3-08c377b92211" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.387564] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820926, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45545} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.387956] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e/b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1781.388591] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1781.388957] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6356ae0-2b09-44a7-95a9-39ee72a84063 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.395959] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1781.395959] env[63241]: value = "task-1820927" [ 1781.395959] env[63241]: _type = "Task" [ 1781.395959] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.404083] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820927, 'name': ExtendVirtualDisk_Task} progress is 0%. 
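[Editor's note] The copy/extend entries above all follow the same pattern: a vCenter task is created, then polled ("Task: {...} progress is N%") until it reports completion. A simplified sketch of that polling loop; read_task_state is a hypothetical helper standing in for the SOAP property read the real code performs:

    import time

    def wait_for_task(task_id, read_task_state, poll_interval=0.5, timeout=300.0):
        """Poll a task until it reports success or error.

        read_task_state(task_id) -> (state, progress, error), where state is
        one of 'queued', 'running', 'success', 'error'.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = read_task_state(task_id)
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError("Task %s failed: %s" % (task_id, error))
            # Still queued/running: report progress and poll again.
            print("Task %s progress is %d%%" % (task_id, progress))
            time.sleep(poll_interval)
        raise TimeoutError("Task %s did not complete within %.0fs" % (task_id, timeout))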
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.476561] env[63241]: DEBUG nova.compute.manager [req-dd980842-354e-4848-9bdd-16fe5fa6638c req-43580c80-0f3c-4846-b8fd-60931f2e2113 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Received event network-vif-deleted-be5471bd-3bc7-4ef4-9ea6-be69b0420644 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1781.476904] env[63241]: INFO nova.compute.manager [req-dd980842-354e-4848-9bdd-16fe5fa6638c req-43580c80-0f3c-4846-b8fd-60931f2e2113 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Neutron deleted interface be5471bd-3bc7-4ef4-9ea6-be69b0420644; detaching it from the instance and deleting it from the info cache [ 1781.476956] env[63241]: DEBUG nova.network.neutron [req-dd980842-354e-4848-9bdd-16fe5fa6638c req-43580c80-0f3c-4846-b8fd-60931f2e2113 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.605504] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.606032] env[63241]: DEBUG nova.compute.manager [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1781.609208] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.355s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.610663] env[63241]: INFO nova.compute.claims [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1781.625962] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820923, 'name': RemoveSnapshot_Task} progress is 0%. 
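[Editor's note] The "compute_resources" lock lines above record both how long a caller waited to acquire the lock and how long it was held ("waited 2.355s", "held 2.359s"). A small context manager reproducing that instrumentation with a plain threading.Lock; illustrative only, Nova uses oslo.concurrency's lockutils:

    import contextlib
    import logging
    import threading
    import time

    LOG = logging.getLogger(__name__)
    _LOCKS = {}
    _REGISTRY_LOCK = threading.Lock()

    @contextlib.contextmanager
    def timed_lock(name, caller):
        """Acquire a named lock, logging wait and hold durations."""
        with _REGISTRY_LOCK:
            lock = _LOCKS.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, caller, waited)
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, caller, time.monotonic() - held_from)

    # Usage, mirroring the resource tracker's claim path:
    #   with timed_lock("compute_resources", "instance_claim"):
    #       ...claim resources...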
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.635609] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1781.636568] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeddcf1a-c1df-4217-a496-38f2d1e87bd3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.644586] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1781.645912] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.646166] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab3838e0-d721-477b-96e2-e112433756fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.757767] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1781.758042] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1781.758252] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleting the datastore file [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1781.758521] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33b700df-c94f-450b-8703-5e09d712d0a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.765013] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1781.765013] env[63241]: value = "task-1820929" [ 1781.765013] env[63241]: _type = "Task" [ 1781.765013] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.772946] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820929, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.910423] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069771} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.910777] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1781.915567] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7428f676-c313-437d-94a3-a734c7323c4f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.947458] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e/b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1781.947976] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f5692a0-e04f-475b-83cd-bcc19bb1dfec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.963934] env[63241]: DEBUG nova.network.neutron [-] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.971081] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1781.971081] env[63241]: value = "task-1820930" [ 1781.971081] env[63241]: _type = "Task" [ 1781.971081] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.983051] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820930, 'name': ReconfigVM_Task} progress is 5%. 
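[Editor's note] "Extending root virtual disk to 1048576" above is the flavor's root disk expressed in KB (root_gb=1 for the m1.nano flavor shown further down, i.e. 1 GiB = 1 048 576 KB); the extend step is only needed when that target exceeds the size of the copied image. A sketch of that calculation and decision, with illustrative function names:

    def root_disk_target_kb(root_gb):
        """Flavor root disk size in KB, as passed to ExtendVirtualDisk."""
        return root_gb * 1024 * 1024

    def needs_extend(image_virtual_size_bytes, root_gb):
        """Extend only when the flavor root disk is larger than the image."""
        target_kb = root_disk_target_kb(root_gb)
        current_kb = image_virtual_size_bytes // 1024
        return target_kb > current_kb

    # m1.nano: root_gb=1 -> 1048576 KB, matching the log line above.
    assert root_disk_target_kb(1) == 1048576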
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.983165] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-103ae3d6-904b-4ae4-bbee-dc2be97a7033 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.993929] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c073f025-3cca-49a1-ad14-7a4234019615 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.025325] env[63241]: DEBUG nova.compute.manager [req-dd980842-354e-4848-9bdd-16fe5fa6638c req-43580c80-0f3c-4846-b8fd-60931f2e2113 service nova] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Detach interface failed, port_id=be5471bd-3bc7-4ef4-9ea6-be69b0420644, reason: Instance 864175e0-33f0-429f-bdf6-722d9b00da2b could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1782.031930] env[63241]: DEBUG nova.network.neutron [-] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.071441] env[63241]: DEBUG nova.compute.manager [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Received event network-vif-deleted-7710fb49-7df6-4e60-ace2-b51c25d1d8ea {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1782.071725] env[63241]: DEBUG nova.compute.manager [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received event network-vif-unplugged-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1782.071934] env[63241]: DEBUG oslo_concurrency.lockutils [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] Acquiring lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.072153] env[63241]: DEBUG oslo_concurrency.lockutils [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.072323] env[63241]: DEBUG oslo_concurrency.lockutils [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.072490] env[63241]: DEBUG nova.compute.manager [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] No waiting events found dispatching 
network-vif-unplugged-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1782.072658] env[63241]: WARNING nova.compute.manager [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received unexpected event network-vif-unplugged-24131a23-55e1-4bd6-8813-5768da05438f for instance with vm_state shelved and task_state shelving_offloading. [ 1782.072817] env[63241]: DEBUG nova.compute.manager [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received event network-changed-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1782.072972] env[63241]: DEBUG nova.compute.manager [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Refreshing instance network info cache due to event network-changed-24131a23-55e1-4bd6-8813-5768da05438f. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1782.073201] env[63241]: DEBUG oslo_concurrency.lockutils [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] Acquiring lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.073336] env[63241]: DEBUG oslo_concurrency.lockutils [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] Acquired lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.073492] env[63241]: DEBUG nova.network.neutron [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Refreshing network info cache for port 24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1782.112212] env[63241]: DEBUG nova.compute.utils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1782.113469] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820923, 'name': RemoveSnapshot_Task, 'duration_secs': 1.436565} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.113730] env[63241]: DEBUG nova.compute.manager [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Allocating IP information in the background. 
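[Editor's note] "No waiting events found dispatching network-vif-unplugged-..." followed by "Received unexpected event ..." above is the compute manager popping an externally delivered Neutron event and finding that no operation registered a waiter for it. A minimal sketch of such a waiter registry; the class name and shape are illustrative, not Nova's implementation:

    import threading

    class InstanceEventRegistry:
        """Track waiters for per-instance external events (e.g. from Neutron)."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare_for(self, instance_uuid, event_name):
            """Called before an operation that expects an event to arrive."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            """Dispatch an incoming event; return True if someone was waiting."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                # Corresponds to "No waiting events found dispatching ..." /
                # "Received unexpected event ..." in the log above.
                return False
            waiter.set()
            return True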
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1782.113893] env[63241]: DEBUG nova.network.neutron [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1782.117501] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1782.117744] env[63241]: DEBUG nova.compute.manager [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1782.119064] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2cbacd-d731-4c64-bdc3-1dbc2060d1b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.174176] env[63241]: DEBUG nova.policy [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54dc853b6f204a75ae7612f9fbd2d1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecafb0abbdc74501b22b20b797c4c60c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1782.278202] env[63241]: DEBUG oslo_vmware.api [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1820929, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134383} completed successfully. 
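[Editor's note] The "Policy check for network:attach_external_network failed" entry above is logged while ports are being created for the new instance: the caller's roles ('reader', 'member') do not satisfy the rule, so these credentials cannot attach the port to an external network. A toy rule evaluation illustrating that outcome; the rule table here is illustrative, not Nova's real policy file:

    # Illustrative policy table: rule name -> roles that satisfy it.
    POLICIES = {
        "network:attach_external_network": {"admin"},
    }

    def authorize(rule, credentials):
        """Return True if any of the caller's roles satisfies the rule."""
        required = POLICIES.get(rule, set())
        return bool(required & set(credentials.get("roles", [])))

    creds = {"roles": ["reader", "member"],
             "project_id": "ecafb0abbdc74501b22b20b797c4c60c"}
    assert authorize("network:attach_external_network", creds) is False  # as in the log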
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.278550] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1782.278854] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1782.278933] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1782.303517] env[63241]: INFO nova.scheduler.client.report [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleted allocations for instance 0e4a3b3a-4464-404f-9154-1ab6f97ae951 [ 1782.467787] env[63241]: INFO nova.compute.manager [-] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Took 1.29 seconds to deallocate network for instance. [ 1782.484372] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820930, 'name': ReconfigVM_Task, 'duration_secs': 0.375635} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.484638] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Reconfigured VM instance instance-0000005a to attach disk [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e/b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1782.485312] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2446ee90-e7bf-41df-add0-149f48ac9445 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.492228] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1782.492228] env[63241]: value = "task-1820931" [ 1782.492228] env[63241]: _type = "Task" [ 1782.492228] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.500117] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820931, 'name': Rename_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.512475] env[63241]: DEBUG nova.network.neutron [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Successfully created port: 6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1782.534738] env[63241]: INFO nova.compute.manager [-] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Took 1.34 seconds to deallocate network for instance. [ 1782.621932] env[63241]: DEBUG nova.compute.manager [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1782.630886] env[63241]: INFO nova.compute.manager [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Shelve offloading [ 1782.632510] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1782.632763] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29aa3268-37df-4ea2-8980-1c30d803fa2c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.639454] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1782.639454] env[63241]: value = "task-1820932" [ 1782.639454] env[63241]: _type = "Task" [ 1782.639454] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.648216] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820932, 'name': PowerOffVM_Task} progress is 0%. 
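[Editor's note] "Shelve offloading" above begins by powering the VM off before it is unregistered; a few entries further down the same task resolves to "VM already powered off", i.e. the power-off is treated as a no-op when the instance is not running. A sketch of that idempotent check; state names follow vSphere's 'poweredOn'/'poweredOff', and the helper callables are illustrative:

    def power_off_if_needed(get_power_state, power_off_task, instance_ref):
        """Power off a VM only if it is currently running.

        get_power_state(ref) -> 'poweredOn' | 'poweredOff' | 'suspended'
        power_off_task(ref)  -> blocks until the PowerOffVM task completes
        """
        state = get_power_state(instance_ref)
        if state == 'poweredOff':
            # Matches "VM already powered off" in the log: nothing to do.
            return False
        power_off_task(instance_ref)
        return True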
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.808769] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.831919] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2a9829-96fb-4bb2-b8c6-b76fda3af8b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.839693] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d023eb-d9d6-4dc7-8070-05d63b404931 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.874134] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e19528-8c55-4100-9bd6-f996d3dfe109 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.881825] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050f9113-754f-4af2-a08f-d0c842cfc394 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.894917] env[63241]: DEBUG nova.compute.provider_tree [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1782.907981] env[63241]: DEBUG nova.network.neutron [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updated VIF entry in instance network info cache for port 24131a23-55e1-4bd6-8813-5768da05438f. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1782.908339] env[63241]: DEBUG nova.network.neutron [req-6efce177-eb31-4399-b5c2-f07cb1c97eff req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating instance_info_cache with network_info: [{"id": "24131a23-55e1-4bd6-8813-5768da05438f", "address": "fa:16:3e:fa:8e:d4", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": null, "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap24131a23-55", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.981018] env[63241]: DEBUG oslo_concurrency.lockutils [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.002670] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820931, 'name': Rename_Task, 'duration_secs': 0.150107} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.003048] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1783.003365] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04708bcf-2423-47fd-ac69-854f28d34fa8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.010181] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1783.010181] env[63241]: value = "task-1820933" [ 1783.010181] env[63241]: _type = "Task" [ 1783.010181] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.018110] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820933, 'name': PowerOnVM_Task} progress is 0%. 
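[Editor's note] The "Updating instance_info_cache with network_info: [...]" entries above carry the full VIF model: port id, MAC, network with subnets and fixed IPs, binding details, and so on. A small helper that pulls the commonly inspected fields out of one such entry; the structure mirrors what the log prints, the helper itself is illustrative:

    def summarize_vif(vif):
        """Reduce one network_info entry to the fields operators usually want."""
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
        ]
        floating_ips = [
            fip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            for fip in ip.get("floating_ips", [])
        ]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "type": vif["type"],      # e.g. "ovs", or "unbound" for a shelved instance
            "active": vif["active"],
            "fixed_ips": fixed_ips,
            "floating_ips": floating_ips,
        }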
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.041354] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.150218] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1783.150429] env[63241]: DEBUG nova.compute.manager [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1783.151214] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2dda578-7acd-450d-96c4-2ff1dc40e902 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.157184] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.157349] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.157553] env[63241]: DEBUG nova.network.neutron [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1783.398080] env[63241]: DEBUG nova.scheduler.client.report [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1783.411110] env[63241]: DEBUG oslo_concurrency.lockutils [req-6efce177-eb31-4399-b5c2-f07cb1c97eff 
req-14a8baf5-03c4-4141-a7c3-13e12f0f7db5 service nova] Releasing lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.520479] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820933, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.634402] env[63241]: DEBUG nova.compute.manager [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1783.657009] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1783.657328] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1783.657490] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1783.657760] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1783.657917] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1783.658081] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1783.658299] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1783.658460] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1783.658681] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1783.658873] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1783.659063] env[63241]: DEBUG nova.virt.hardware [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1783.661791] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8d4170-b042-42b7-b999-fe741b07e551 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.670476] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750a87ce-906e-4ad0-8817-95e299803954 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.903345] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.903935] env[63241]: DEBUG nova.compute.manager [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Start building networks asynchronously for instance. 
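[Editor's note] The hardware lines above walk from the flavor/image limits (effectively unlimited: 65536 sockets, cores, and threads) to "Build topologies for 1 vcpu(s) 1:1:1" and a single possible VirtCPUTopology(cores=1,sockets=1,threads=1). A sketch of that enumeration, finding every (sockets, cores, threads) factorization of the vCPU count that respects the maxima; simplified relative to nova.virt.hardware:

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """All (sockets, cores, threads) triples whose product equals vcpus."""
        topologies = []
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            for cores in range(1, min(max_cores, vcpus) + 1):
                for threads in range(1, min(max_threads, vcpus) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # 1 vCPU with effectively unlimited maxima -> exactly one topology, 1:1:1.
    print(possible_topologies(1, 65536, 65536, 65536))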
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1783.907962] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.251s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.908880] env[63241]: INFO nova.compute.claims [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1784.000225] env[63241]: DEBUG nova.network.neutron [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating instance_info_cache with network_info: [{"id": "7a0be842-edfe-48ff-9275-dbb260c7e781", "address": "fa:16:3e:aa:cc:cf", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0be842-ed", "ovs_interfaceid": "7a0be842-edfe-48ff-9275-dbb260c7e781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.022102] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820933, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.320255] env[63241]: DEBUG nova.network.neutron [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Successfully updated port: 6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1784.329906] env[63241]: DEBUG nova.compute.manager [req-c3e9e94a-8ad9-42a9-a4c9-51f7f73fcd42 req-2865288a-90da-42c5-9c78-74d84d6d6b9d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received event network-vif-plugged-6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1784.330102] env[63241]: DEBUG oslo_concurrency.lockutils [req-c3e9e94a-8ad9-42a9-a4c9-51f7f73fcd42 req-2865288a-90da-42c5-9c78-74d84d6d6b9d service nova] Acquiring lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.331260] env[63241]: DEBUG oslo_concurrency.lockutils [req-c3e9e94a-8ad9-42a9-a4c9-51f7f73fcd42 req-2865288a-90da-42c5-9c78-74d84d6d6b9d service nova] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.331260] env[63241]: DEBUG oslo_concurrency.lockutils [req-c3e9e94a-8ad9-42a9-a4c9-51f7f73fcd42 req-2865288a-90da-42c5-9c78-74d84d6d6b9d service nova] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.331260] env[63241]: DEBUG nova.compute.manager [req-c3e9e94a-8ad9-42a9-a4c9-51f7f73fcd42 req-2865288a-90da-42c5-9c78-74d84d6d6b9d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] No waiting events found dispatching network-vif-plugged-6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1784.331260] env[63241]: WARNING nova.compute.manager [req-c3e9e94a-8ad9-42a9-a4c9-51f7f73fcd42 req-2865288a-90da-42c5-9c78-74d84d6d6b9d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received unexpected event network-vif-plugged-6f97669d-a2c6-4625-a1b6-374f5565ebb0 for instance with vm_state building and task_state spawning. [ 1784.413949] env[63241]: DEBUG nova.compute.utils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1784.417370] env[63241]: DEBUG nova.compute.manager [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1784.417550] env[63241]: DEBUG nova.network.neutron [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1784.503021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.507077] env[63241]: DEBUG nova.policy [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55a203eb86bb4f7a9da3c78f292fd0a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b99c34b76394424983c77aa10d41233d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1784.524248] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820933, 'name': PowerOnVM_Task, 'duration_secs': 1.081017} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.524533] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1784.524745] env[63241]: DEBUG nova.compute.manager [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1784.525532] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354782c8-f38e-46d3-b20b-373ec0725f6e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.824083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.824263] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.824425] env[63241]: DEBUG nova.network.neutron [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.842859] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.917707] env[63241]: DEBUG nova.compute.manager [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Start building block device mappings for instance. 
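[Editor's note] After PowerOnVM_Task completes above, the compute manager re-reads the instance power state ("Checking state") before recording it. A sketch of the vSphere-to-Nova power-state translation such a check performs; the mapping shown is the conventional one and should be treated as illustrative:

    # Nova-style power state codes (values follow nova.compute.power_state).
    NOSTATE, RUNNING, SHUTDOWN, SUSPENDED = 0, 1, 4, 7

    # vSphere runtime.powerState -> Nova power state.
    VMWARE_POWER_STATES = {
        "poweredOn": RUNNING,
        "poweredOff": SHUTDOWN,
        "suspended": SUSPENDED,
    }

    def translate_power_state(vmware_state):
        """Map a vSphere power state string to a Nova power-state code."""
        return VMWARE_POWER_STATES.get(vmware_state, NOSTATE)

    assert translate_power_state("poweredOn") == RUNNING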
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1785.020301] env[63241]: DEBUG nova.network.neutron [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Successfully created port: fd9d90f6-0bb7-4052-9f8e-2ad931e916ed {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1785.035167] env[63241]: INFO nova.compute.manager [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] bringing vm to original state: 'stopped' [ 1785.122684] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1785.124312] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a8cef2-12c7-44b8-a57b-da38df654f53 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.135082] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1785.138386] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71294b63-6f24-49d5-84ca-bd99d942bc78 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.201810] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e82d81-8498-4f53-b35c-77813b2710d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.209063] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1ec128-c484-45b5-83d1-ad62df29bd38 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.239113] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8271c7a-2ea0-4a73-b1af-086b983d1611 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.246519] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0e8634-fd1a-4504-a729-e0831b813b37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.259611] env[63241]: DEBUG nova.compute.provider_tree [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1785.391868] env[63241]: 
DEBUG nova.network.neutron [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1785.762957] env[63241]: DEBUG nova.scheduler.client.report [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1785.832022] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1785.832022] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1785.832022] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleting the datastore file [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1785.832022] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0d09d28-d691-486f-9ff8-edfd2b9ae5d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.838284] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1785.838284] env[63241]: value = "task-1820935" [ 1785.838284] env[63241]: _type = "Task" [ 1785.838284] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.848260] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820935, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.860416] env[63241]: DEBUG nova.network.neutron [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.932131] env[63241]: DEBUG nova.compute.manager [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Start spawning the instance on the hypervisor. 
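The network_info blob cached above for instance 6b96988b-cc79-41d7-a17d-277ae5aeb4dc is, underneath Nova's network model wrappers, a plain list of VIF dicts. A short sketch of reading the commonly used fields out of such a record; the sample data is copied from the entry logged above and trimmed to the relevant keys.

```python
# Reading the VIF fields the compute driver acts on, from a network_info
# entry shaped like the one cached above (values copied from the log,
# trimmed to the relevant keys).
network_info = [{
    "id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0",
    "address": "fa:16:3e:0e:96:49",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.8", "type": "fixed"}],
        }],
    },
    "details": {
        "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8",
        "segmentation_id": 162,
    },
    "devname": "tap6f97669d-a2",
    "vnic_type": "normal",
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
```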
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1785.958755] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1785.959049] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1785.959211] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1785.959393] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1785.959535] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1785.959682] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1785.959880] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1785.960122] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Build topologies for 1 vcpu(s) 1:1:1 
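These nova.virt.hardware records (continued in the lines that follow) walk the m1.nano flavor, 1 vCPU with no explicit limits so the maxima default to 65536, through CPU-topology selection and end up with the single candidate 1:1:1. A simplified illustration of the enumeration idea, not Nova's actual _get_possible_cpu_topologies implementation:

```python
# Simplified sketch of the enumeration summarized here: every
# (sockets, cores, threads) triple whose product equals the flavor's vCPU
# count and respects the per-dimension maxima is a candidate topology.
# This illustrates the idea only; it is not Nova's code.
from itertools import product


def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    return [
        (s, c, t)
        for s, c, t in product(range(1, min(max_sockets, vcpus) + 1),
                               range(1, min(max_cores, vcpus) + 1),
                               range(1, min(max_threads, vcpus) + 1))
        if s * c * t == vcpus
    ]


print(possible_topologies(1, 65536, 65536, 65536))  # [(1, 1, 1)]
print(possible_topologies(4, 65536, 65536, 65536))  # (1, 2, 2), (4, 1, 1), ...
```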
{{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1785.960304] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1785.960464] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1785.960633] env[63241]: DEBUG nova.virt.hardware [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1785.961481] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bb792b-5a83-4419-bd6b-c2ec4ded6bc3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.969327] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a898faf4-7be4-431d-8e37-27121bb88e4e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.044670] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.044941] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.045146] env[63241]: DEBUG nova.compute.manager [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1786.046052] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8463040e-250a-492a-b9dd-20b49b236954 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.053017] env[63241]: DEBUG nova.compute.manager [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Stopping instance; current vm_state: active, current task_state: 
powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1786.055093] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1786.055333] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37ab0d29-f16f-4ec3-a813-6c170ce1c0fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.062074] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1786.062074] env[63241]: value = "task-1820936" [ 1786.062074] env[63241]: _type = "Task" [ 1786.062074] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.069902] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820936, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.267983] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.268652] env[63241]: DEBUG nova.compute.manager [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1786.271709] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.372s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.273351] env[63241]: INFO nova.compute.claims [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1786.349781] env[63241]: DEBUG oslo_vmware.api [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1820935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.425412} completed successfully. 
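The "Invoking VirtualMachine.PowerOffVM_Task", "Waiting for the task ... to complete" and "progress is 0%" records above are the standard oslo.vmware invoke-and-poll cycle, as is the DeleteDatastoreFile_Task that just completed. A hedged sketch of that cycle using oslo.vmware's public session API; `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession and the managed-object references are assumed to have been looked up elsewhere, so nothing is constructed here.

```python
# Sketch of the invoke-and-wait cycle behind the "Invoking ..._Task",
# "Waiting for the task ... to complete" and "progress is N%" records.
# `session`, `vm_ref`, `file_path` and `datacenter_ref` are assumed inputs.


def power_off(session, vm_ref):
    # Kick off the asynchronous vSphere task on the VM...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ...then block while oslo.vmware polls it (the periodic "progress"
    # lines); wait_for_task raises on a task error and returns the
    # completed task info on success.
    return session.wait_for_task(task)


def delete_datastore_file(session, file_path, datacenter_ref):
    # Same pattern against the FileManager managed object.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=file_path,
                              datacenter=datacenter_ref)
    return session.wait_for_task(task)
```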
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.350067] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1786.350267] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1786.350441] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1786.363117] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.363470] env[63241]: DEBUG nova.compute.manager [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Instance network_info: |[{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1786.364935] env[63241]: DEBUG nova.compute.manager [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received event network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1786.365132] env[63241]: DEBUG nova.compute.manager [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 
req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing instance network info cache due to event network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1786.365339] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] Acquiring lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.365479] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] Acquired lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.365679] env[63241]: DEBUG nova.network.neutron [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1786.366833] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:96:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6f97669d-a2c6-4625-a1b6-374f5565ebb0', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1786.375369] env[63241]: DEBUG oslo.service.loopingcall [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1786.377356] env[63241]: INFO nova.scheduler.client.report [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted allocations for instance 01af6dc5-e0e7-4f8b-ad07-73af80c32577 [ 1786.382353] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1786.383337] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4522b78c-a7b5-446a-8b85-3fe6df8790db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.403676] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1786.403676] env[63241]: value = "task-1820937" [ 1786.403676] env[63241]: _type = "Task" [ 1786.403676] env[63241]: } to complete. 
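The oslo.service.loopingcall record above ("Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return") is that module's polling machinery at work. For reference, a self-contained example of its FixedIntervalLoopingCall primitive; the poll function, counter and interval are invented for illustration, and this is not the exact call site from the log.

```python
# Self-contained example of oslo.service's FixedIntervalLoopingCall, the
# polling primitive the loopingcall module provides. The poll function,
# counter and interval below are made up for the example.
from oslo_service import loopingcall

state = {'polls': 0}


def _poll():
    state['polls'] += 1
    if state['polls'] >= 3:
        # Raising LoopingCallDone stops the loop; its retvalue is what
        # .wait() on the returned event hands back to the caller.
        raise loopingcall.LoopingCallDone(retvalue='task finished')


timer = loopingcall.FixedIntervalLoopingCall(_poll)
print(timer.start(interval=0.5).wait())  # blocks ~1s, prints 'task finished'
```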
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.411978] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820937, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.571942] env[63241]: DEBUG oslo_vmware.api [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820936, 'name': PowerOffVM_Task, 'duration_secs': 0.264446} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.572254] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1786.572486] env[63241]: DEBUG nova.compute.manager [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1786.573236] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc887f3f-5979-41ff-935e-5e5c213bdb1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.778736] env[63241]: DEBUG nova.compute.utils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1786.781981] env[63241]: DEBUG nova.compute.manager [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1786.782167] env[63241]: DEBUG nova.network.neutron [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1786.812643] env[63241]: DEBUG nova.network.neutron [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updated VIF entry in instance network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1786.812790] env[63241]: DEBUG nova.network.neutron [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.826294] env[63241]: DEBUG nova.policy [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8f8e170296b46d6a108092608492772', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e139fb67702e42d8a8b2401cc6be9303', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1786.827846] env[63241]: DEBUG nova.network.neutron [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Successfully updated port: fd9d90f6-0bb7-4052-9f8e-2ad931e916ed {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1786.882092] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.916227] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820937, 'name': CreateVM_Task, 'duration_secs': 0.353093} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.916434] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1786.919936] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.919936] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.920319] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1786.920319] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27c8bca9-6861-45e0-ad32-d435081dd8f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.925712] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1786.925712] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e7bb26-d867-7e94-fa7f-d1f9876370ea" [ 1786.925712] env[63241]: _type = "Task" [ 1786.925712] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.933735] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e7bb26-d867-7e94-fa7f-d1f9876370ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.089078] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.044s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.107306] env[63241]: DEBUG nova.network.neutron [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Successfully created port: 95095173-ff26-4be0-88de-b44051605ee6 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1787.282805] env[63241]: DEBUG nova.compute.manager [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1787.321020] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] Releasing lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.321020] env[63241]: DEBUG nova.compute.manager [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received event network-vif-unplugged-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1787.321020] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] Acquiring lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.321020] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.321020] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.321020] env[63241]: DEBUG nova.compute.manager [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] No waiting events found dispatching 
network-vif-unplugged-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1787.321020] env[63241]: WARNING nova.compute.manager [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received unexpected event network-vif-unplugged-7a0be842-edfe-48ff-9275-dbb260c7e781 for instance with vm_state shelved and task_state shelving_offloading. [ 1787.321020] env[63241]: DEBUG nova.compute.manager [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received event network-changed-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1787.321020] env[63241]: DEBUG nova.compute.manager [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Refreshing instance network info cache due to event network-changed-7a0be842-edfe-48ff-9275-dbb260c7e781. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1787.321392] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] Acquiring lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.321485] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] Acquired lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.321758] env[63241]: DEBUG nova.network.neutron [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Refreshing network info cache for port 7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1787.333040] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquiring lock "refresh_cache-91b65576-47be-4a92-a6fd-8380532c8e1d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.333040] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquired lock "refresh_cache-91b65576-47be-4a92-a6fd-8380532c8e1d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.333384] env[63241]: DEBUG nova.network.neutron [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1787.438724] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 
tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e7bb26-d867-7e94-fa7f-d1f9876370ea, 'name': SearchDatastore_Task, 'duration_secs': 0.183577} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.439509] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.439509] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1787.439509] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.439662] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.439715] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1787.439973] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a8b8a64-805c-4980-9be2-ab44ddad03c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.453070] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1787.453273] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1787.456291] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc074860-1b79-457e-b943-81866c5c3ba1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.461859] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1787.461859] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528669a6-f1a5-75cf-710e-99edd561b1da" [ 1787.461859] env[63241]: _type = "Task" [ 1787.461859] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.469016] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528669a6-f1a5-75cf-710e-99edd561b1da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.528501] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9faa0192-59d4-4b37-b40b-8976858fbacc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.535676] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5eea3d8-bf37-464a-a722-91f1efcbc7a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.565308] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9d5d79-2d0b-417d-90a6-834b8c64affb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.571817] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3ad136-0af9-4853-b33d-0b949d95e930 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.584404] env[63241]: DEBUG nova.compute.provider_tree [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1787.595954] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.898482] env[63241]: DEBUG nova.network.neutron [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1787.973481] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]528669a6-f1a5-75cf-710e-99edd561b1da, 'name': SearchDatastore_Task, 'duration_secs': 0.008234} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.974311] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccaa7d52-783c-4a2a-8f15-694b53d483a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.979683] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1787.979683] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52212b5f-7cad-e7a7-158b-cfc83b4675af" [ 1787.979683] env[63241]: _type = "Task" [ 1787.979683] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.987319] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52212b5f-7cad-e7a7-158b-cfc83b4675af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.087095] env[63241]: DEBUG nova.scheduler.client.report [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1788.278387] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.296492] env[63241]: DEBUG nova.compute.manager [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Start spawning the instance on the hypervisor. 
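The inventory reported above for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b translates into schedulable capacity by the usual Placement rule, capacity = (total - reserved) * allocation_ratio per resource class. A quick check with the numbers from that record:

```python
# Schedulable capacity implied by the inventory record logged above:
# capacity = (total - reserved) * allocation_ratio, per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    cap = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f'{rc}: {cap:g}')  # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```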
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1788.318059] env[63241]: DEBUG nova.network.neutron [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Updating instance_info_cache with network_info: [{"id": "fd9d90f6-0bb7-4052-9f8e-2ad931e916ed", "address": "fa:16:3e:7d:3d:47", "network": {"id": "32bfe241-e035-47ec-bedd-6caafadcbb18", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-966878084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99c34b76394424983c77aa10d41233d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd9d90f6-0b", "ovs_interfaceid": "fd9d90f6-0bb7-4052-9f8e-2ad931e916ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.323143] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1788.323143] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1788.323143] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1788.323143] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f 
tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1788.323383] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1788.323419] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1788.323883] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1788.323883] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1788.323989] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1788.324301] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1788.324512] env[63241]: DEBUG nova.virt.hardware [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1788.325366] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f85799-a973-48aa-9652-d72180f77f29 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.333893] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18642c72-a22c-497b-b9d8-602267e17d4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.365442] env[63241]: DEBUG nova.network.neutron [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updated VIF entry in instance network info 
cache for port 7a0be842-edfe-48ff-9275-dbb260c7e781. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1788.365768] env[63241]: DEBUG nova.network.neutron [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating instance_info_cache with network_info: [{"id": "7a0be842-edfe-48ff-9275-dbb260c7e781", "address": "fa:16:3e:aa:cc:cf", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap7a0be842-ed", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.491368] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52212b5f-7cad-e7a7-158b-cfc83b4675af, 'name': SearchDatastore_Task, 'duration_secs': 0.008687} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.491639] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.491893] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 6b96988b-cc79-41d7-a17d-277ae5aeb4dc/6b96988b-cc79-41d7-a17d-277ae5aeb4dc.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1788.492163] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8f95c22-1eeb-499c-8f06-3c739a1e3f79 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.498532] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1788.498532] env[63241]: value = "task-1820938" [ 1788.498532] env[63241]: _type = "Task" [ 1788.498532] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.506147] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.592563] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.593216] env[63241]: DEBUG nova.compute.manager [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1788.597094] env[63241]: DEBUG nova.compute.manager [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Received event network-vif-plugged-fd9d90f6-0bb7-4052-9f8e-2ad931e916ed {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1788.597429] env[63241]: DEBUG oslo_concurrency.lockutils [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] Acquiring lock "91b65576-47be-4a92-a6fd-8380532c8e1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.597800] env[63241]: DEBUG oslo_concurrency.lockutils [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.598106] env[63241]: DEBUG oslo_concurrency.lockutils [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.598329] env[63241]: DEBUG nova.compute.manager [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] No waiting events found dispatching network-vif-plugged-fd9d90f6-0bb7-4052-9f8e-2ad931e916ed {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1788.598579] env[63241]: WARNING nova.compute.manager [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Received unexpected event network-vif-plugged-fd9d90f6-0bb7-4052-9f8e-2ad931e916ed for instance with vm_state building and task_state spawning. [ 1788.598825] env[63241]: DEBUG nova.compute.manager [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Received event network-changed-fd9d90f6-0bb7-4052-9f8e-2ad931e916ed {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1788.599092] env[63241]: DEBUG nova.compute.manager [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Refreshing instance network info cache due to event network-changed-fd9d90f6-0bb7-4052-9f8e-2ad931e916ed. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1788.599330] env[63241]: DEBUG oslo_concurrency.lockutils [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] Acquiring lock "refresh_cache-91b65576-47be-4a92-a6fd-8380532c8e1d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.599943] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.954s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.601453] env[63241]: INFO nova.compute.claims [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1788.820791] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Releasing lock "refresh_cache-91b65576-47be-4a92-a6fd-8380532c8e1d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.821184] env[63241]: DEBUG nova.compute.manager [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Instance network_info: |[{"id": "fd9d90f6-0bb7-4052-9f8e-2ad931e916ed", "address": "fa:16:3e:7d:3d:47", "network": {"id": "32bfe241-e035-47ec-bedd-6caafadcbb18", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-966878084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99c34b76394424983c77aa10d41233d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd9d90f6-0b", "ovs_interfaceid": "fd9d90f6-0bb7-4052-9f8e-2ad931e916ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1788.821528] env[63241]: DEBUG oslo_concurrency.lockutils [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] Acquired lock "refresh_cache-91b65576-47be-4a92-a6fd-8380532c8e1d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.821742] env[63241]: DEBUG nova.network.neutron 
[req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Refreshing network info cache for port fd9d90f6-0bb7-4052-9f8e-2ad931e916ed {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1788.823199] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:3d:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd9d90f6-0bb7-4052-9f8e-2ad931e916ed', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1788.831819] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Creating folder: Project (b99c34b76394424983c77aa10d41233d). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1788.833077] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cac7253-6e73-4da2-a29a-37069bce50bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.844838] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Created folder: Project (b99c34b76394424983c77aa10d41233d) in parent group-v376927. [ 1788.845053] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Creating folder: Instances. Parent ref: group-v377180. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1788.845389] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-347547ab-b91a-4333-bba6-228959c75474 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.855933] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Created folder: Instances in parent group-v377180. [ 1788.856302] env[63241]: DEBUG oslo.service.loopingcall [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1788.856519] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1788.856762] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-710f11f5-2914-4724-8881-ac126e1fe950 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.873093] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c36a83b-42de-4a1e-8630-a0f6c37e7fb3 req-9f565ec5-4647-4347-b48c-e5e36f9fe177 service nova] Releasing lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.879330] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1788.879330] env[63241]: value = "task-1820941" [ 1788.879330] env[63241]: _type = "Task" [ 1788.879330] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.888513] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.008241] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820938, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.402811} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.008488] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 6b96988b-cc79-41d7-a17d-277ae5aeb4dc/6b96988b-cc79-41d7-a17d-277ae5aeb4dc.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1789.008697] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1789.008968] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d6becc3-73cd-4b89-b0b5-9f0a4d282b6f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.015010] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1789.015010] env[63241]: value = "task-1820942" [ 1789.015010] env[63241]: _type = "Task" [ 1789.015010] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.022412] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820942, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.070270] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.070520] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.070735] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.070920] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.071106] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.073249] env[63241]: INFO nova.compute.manager [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Terminating instance [ 1789.074979] env[63241]: DEBUG nova.compute.manager [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1789.075183] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1789.076053] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dafd8b6-3ead-45c0-a91c-93d81465e6e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.082875] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1789.083065] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e07f0ae-1818-4704-ae30-de70268d3343 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.101341] env[63241]: DEBUG nova.compute.utils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1789.102554] env[63241]: DEBUG nova.compute.manager [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1789.102719] env[63241]: DEBUG nova.network.neutron [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1789.152559] env[63241]: DEBUG nova.policy [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f52bf5741a490c83e01e06f686559e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c64d07a686b414f93ec4c599307498f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1789.390342] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.532892] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063788} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.533181] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1789.533967] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7030fb-ca64-408e-bf60-27a915e0a3d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.558575] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 6b96988b-cc79-41d7-a17d-277ae5aeb4dc/6b96988b-cc79-41d7-a17d-277ae5aeb4dc.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1789.558884] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06ad346a-3e10-4a62-b1fe-1dc31c46a08e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.579684] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1789.579684] env[63241]: value = "task-1820944" [ 1789.579684] env[63241]: _type = "Task" [ 1789.579684] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.589239] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820944, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.606695] env[63241]: DEBUG nova.compute.manager [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1789.654679] env[63241]: DEBUG nova.network.neutron [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Successfully created port: 1c88bbab-4bd7-4ea5-858c-317020381bac {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1789.817948] env[63241]: DEBUG nova.network.neutron [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Updated VIF entry in instance network info cache for port fd9d90f6-0bb7-4052-9f8e-2ad931e916ed. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1789.818339] env[63241]: DEBUG nova.network.neutron [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Updating instance_info_cache with network_info: [{"id": "fd9d90f6-0bb7-4052-9f8e-2ad931e916ed", "address": "fa:16:3e:7d:3d:47", "network": {"id": "32bfe241-e035-47ec-bedd-6caafadcbb18", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-966878084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99c34b76394424983c77aa10d41233d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd9d90f6-0b", "ovs_interfaceid": "fd9d90f6-0bb7-4052-9f8e-2ad931e916ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.836899] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc4b5be-9054-42a9-a76a-fe9766355b58 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.843821] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e608656-642b-40c6-b16a-8affb2844db6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.885766] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557f910a-aa95-4da0-a52a-78bc0c669689 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.893561] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.896671] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462e6414-ba0b-43fe-b921-9d60435c9bcc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.911497] env[63241]: DEBUG nova.compute.provider_tree [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.090326] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820944, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.321252] env[63241]: DEBUG oslo_concurrency.lockutils [req-27690961-35bb-44fa-9b0c-c301e6a058d3 req-0b0f6ad3-9ff7-46ce-9767-65e47adbacf5 service nova] Releasing lock "refresh_cache-91b65576-47be-4a92-a6fd-8380532c8e1d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.391838] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.415592] env[63241]: DEBUG nova.scheduler.client.report [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1790.589823] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820944, 'name': ReconfigVM_Task, 'duration_secs': 0.687891} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.591042] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 6b96988b-cc79-41d7-a17d-277ae5aeb4dc/6b96988b-cc79-41d7-a17d-277ae5aeb4dc.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1790.591042] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cabc0584-36e6-4756-b462-be521534c93c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.597281] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1790.597281] env[63241]: value = "task-1820945" [ 1790.597281] env[63241]: _type = "Task" [ 1790.597281] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.604408] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820945, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.617018] env[63241]: DEBUG nova.compute.manager [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1790.649565] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1790.649812] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1790.649970] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.650247] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1790.650407] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.650560] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1790.650775] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1790.650937] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1790.651120] env[63241]: DEBUG nova.virt.hardware [None 
req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1790.651284] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1790.651457] env[63241]: DEBUG nova.virt.hardware [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1790.652350] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a314e2-3207-43f5-82b1-607ff9fa917f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.659866] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e0cbea-9ca4-4940-a776-124f3b51dae7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.892230] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.921297] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.921825] env[63241]: DEBUG nova.compute.manager [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1790.925123] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.116s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.925352] env[63241]: DEBUG nova.objects.instance [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lazy-loading 'resources' on Instance uuid 0e4a3b3a-4464-404f-9154-1ab6f97ae951 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1791.107884] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820945, 'name': Rename_Task, 'duration_secs': 0.141468} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.108251] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1791.108505] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38404f90-4f3d-4458-a0b4-916065bfe7e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.114369] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1791.114369] env[63241]: value = "task-1820946" [ 1791.114369] env[63241]: _type = "Task" [ 1791.114369] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.122195] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820946, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.393411] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.428768] env[63241]: DEBUG nova.objects.instance [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lazy-loading 'numa_topology' on Instance uuid 0e4a3b3a-4464-404f-9154-1ab6f97ae951 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1791.430292] env[63241]: DEBUG nova.compute.utils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1791.431442] env[63241]: DEBUG nova.compute.manager [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1791.431677] env[63241]: DEBUG nova.network.neutron [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1791.494929] env[63241]: DEBUG nova.policy [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa5224c96c3545269f4f45be620a7cdf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98582d7ee18145318ee5a05cac36781e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1791.625182] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820946, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.824294] env[63241]: DEBUG nova.network.neutron [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Successfully created port: 7133056f-eb77-48c4-b773-9923e2a62fc6 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1791.851875] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "29b6caa8-a07c-494b-b776-b08affa45c87" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.852134] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.893262] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.932083] env[63241]: DEBUG nova.objects.base [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Object Instance<0e4a3b3a-4464-404f-9154-1ab6f97ae951> lazy-loaded attributes: resources,numa_topology {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1791.935514] env[63241]: DEBUG nova.compute.manager [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1792.129939] env[63241]: DEBUG oslo_vmware.api [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1820946, 'name': PowerOnVM_Task, 'duration_secs': 0.515638} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.130350] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1792.130439] env[63241]: INFO nova.compute.manager [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Took 8.50 seconds to spawn the instance on the hypervisor. 
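Editor's note: the records above all follow the same vCenter task pattern — the vmwareapi driver invokes a *_Task method (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) and then oslo.vmware's wait_for_task polls the task until it reports success, which is what produces the repeated "progress is N%" / "completed successfully" lines such as task-1820946. The sketch below is a minimal, self-contained illustration of that polling loop only; FakeTask and poll_interval are stand-ins invented for this example and are not oslo.vmware or Nova code.

    # Illustrative sketch of the "invoke *_Task, then poll until success" pattern
    # visible in the log. This is NOT oslo.vmware itself; FakeTask stands in for
    # a vCenter task managed object.
    import time


    class FakeTask:
        """Stub task that reports increasing progress, then success."""

        def __init__(self, name):
            self.name = name
            self._progress = 0

        def poll(self):
            # A real driver would read the task's info.state / info.progress via
            # the PropertyCollector; here we just advance a counter.
            self._progress = min(self._progress + 25, 100)
            state = 'success' if self._progress == 100 else 'running'
            return state, self._progress


    def wait_for_task(task, poll_interval=0.5):
        """Poll a task until it succeeds, loosely mirroring oslo_vmware wait_for_task."""
        while True:
            state, progress = task.poll()
            print(f"Task {task.name} progress is {progress}%")
            if state == 'success':
                print(f"Task {task.name} completed successfully")
                return
            time.sleep(poll_interval)


    if __name__ == '__main__':
        wait_for_task(FakeTask('PowerOnVM_Task'))

Running the sketch prints a progress sequence comparable to the task polling records above, ending once the stub task reaches 100%.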
[ 1792.130616] env[63241]: DEBUG nova.compute.manager [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1792.131436] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1a8e0f-0b2d-42b0-9bce-ddde871367a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.228053] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72874b47-e456-4a19-bfaa-90c9ca286328 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.235773] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a36e814-58a5-4d62-a959-41a7ceb1f310 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.269378] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258e8a11-75f8-4070-aeb9-ed209b2312b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.278643] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed5faa0-c5fa-4793-8cb9-81aad9b2adfe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.294060] env[63241]: DEBUG nova.compute.provider_tree [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1792.355864] env[63241]: INFO nova.compute.manager [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Detaching volume 7f85c901-777e-4254-8502-a75d490b1a9e [ 1792.393975] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.395105] env[63241]: INFO nova.virt.block_device [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Attempting to driver detach volume 7f85c901-777e-4254-8502-a75d490b1a9e from mountpoint /dev/sdb [ 1792.395299] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1792.395488] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377159', 'volume_id': '7f85c901-777e-4254-8502-a75d490b1a9e', 'name': 'volume-7f85c901-777e-4254-8502-a75d490b1a9e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '29b6caa8-a07c-494b-b776-b08affa45c87', 'attached_at': '', 'detached_at': '', 'volume_id': '7f85c901-777e-4254-8502-a75d490b1a9e', 'serial': '7f85c901-777e-4254-8502-a75d490b1a9e'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1792.396261] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1629c9-6528-414d-b3a4-7c534a759666 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.416197] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1f0766-fc84-4ae0-8c84-7b7f8102290c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.422702] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182d7004-6b73-4c12-98e0-e01008a4feb5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.445472] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f000892-3594-402e-b964-f1632ca9b7de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.460376] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] The volume has not been displaced from its original location: [datastore1] volume-7f85c901-777e-4254-8502-a75d490b1a9e/volume-7f85c901-777e-4254-8502-a75d490b1a9e.vmdk. No consolidation needed. 
{{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1792.465761] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Reconfiguring VM instance instance-00000047 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1792.466109] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ecdce7d-bfee-42f5-91ac-522c528ba271 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.484025] env[63241]: DEBUG oslo_vmware.api [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1792.484025] env[63241]: value = "task-1820947" [ 1792.484025] env[63241]: _type = "Task" [ 1792.484025] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.494192] env[63241]: DEBUG oslo_vmware.api [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820947, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.688723] env[63241]: INFO nova.compute.manager [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Took 13.47 seconds to build instance. [ 1792.796774] env[63241]: DEBUG nova.scheduler.client.report [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1792.893814] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.949833] env[63241]: DEBUG nova.compute.manager [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1792.978773] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1792.979106] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1792.979288] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1792.979476] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1792.979622] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1792.979770] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1792.979975] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1792.980154] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1792.980351] 
env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1792.980519] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1792.980689] env[63241]: DEBUG nova.virt.hardware [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1792.981594] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4700753-b613-4bc0-9cf2-19d41945583f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.990630] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff53e972-b06d-4d05-bc00-94f3e119645f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.996812] env[63241]: DEBUG oslo_vmware.api [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820947, 'name': ReconfigVM_Task, 'duration_secs': 0.238383} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.997351] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Reconfigured VM instance instance-00000047 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1793.009114] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbfab0c2-0def-4f5f-b6e4-afda1d653218 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.024234] env[63241]: DEBUG oslo_vmware.api [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1793.024234] env[63241]: value = "task-1820948" [ 1793.024234] env[63241]: _type = "Task" [ 1793.024234] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.031609] env[63241]: DEBUG oslo_vmware.api [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820948, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.191204] env[63241]: DEBUG oslo_concurrency.lockutils [None req-495319ff-1e2f-40ed-9d9b-73baf772a0a9 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.984s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.301348] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.376s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.303913] env[63241]: DEBUG oslo_concurrency.lockutils [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.324s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.304123] env[63241]: DEBUG oslo_concurrency.lockutils [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.306201] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.265s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.306421] env[63241]: DEBUG nova.objects.instance [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lazy-loading 'resources' on Instance uuid 46ac69f3-375c-4b60-bc33-83ad8577c4fb {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1793.349537] env[63241]: INFO nova.scheduler.client.report [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted allocations for instance 864175e0-33f0-429f-bdf6-722d9b00da2b [ 1793.396107] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.536009] env[63241]: DEBUG oslo_vmware.api [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820948, 'name': ReconfigVM_Task, 'duration_secs': 0.132445} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.536391] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377159', 'volume_id': '7f85c901-777e-4254-8502-a75d490b1a9e', 'name': 'volume-7f85c901-777e-4254-8502-a75d490b1a9e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '29b6caa8-a07c-494b-b776-b08affa45c87', 'attached_at': '', 'detached_at': '', 'volume_id': '7f85c901-777e-4254-8502-a75d490b1a9e', 'serial': '7f85c901-777e-4254-8502-a75d490b1a9e'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1793.819296] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c83f5782-2577-43a5-ac49-c39ccd0338bb tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 32.637s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.820242] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 8.980s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.820431] env[63241]: INFO nova.compute.manager [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Unshelving [ 1793.823747] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1793.823944] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1793.824230] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleting the datastore file [datastore1] b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1793.826739] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7107c2b7-c7d7-4f48-be94-a89072089b98 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.831914] 
env[63241]: DEBUG oslo_vmware.api [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1793.831914] env[63241]: value = "task-1820949" [ 1793.831914] env[63241]: _type = "Task" [ 1793.831914] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.843897] env[63241]: DEBUG oslo_vmware.api [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820949, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.861864] env[63241]: DEBUG oslo_concurrency.lockutils [None req-20d58ae9-0aaa-4808-9234-bcd1e05f20b3 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "864175e0-33f0-429f-bdf6-722d9b00da2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.089s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.899621] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.064336] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb68aae-7614-48c0-aa4a-d838d65c8a9d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.072762] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2d9111-6b9d-4d13-a722-2a654928c6e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.104361] env[63241]: DEBUG nova.objects.instance [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1794.106248] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18dbe96-bb41-42d4-b5bb-a9a65d6b64e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.114398] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefb93cf-d8d1-4741-a8f0-9fab93c34205 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.134317] env[63241]: DEBUG nova.compute.provider_tree [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1794.173147] env[63241]: DEBUG nova.compute.manager [req-c1150c9e-fcb6-405a-9066-267a1caba882 req-22993b61-ce61-4800-ada3-30fc4c52427d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received event 
network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1794.173147] env[63241]: DEBUG nova.compute.manager [req-c1150c9e-fcb6-405a-9066-267a1caba882 req-22993b61-ce61-4800-ada3-30fc4c52427d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing instance network info cache due to event network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1794.173147] env[63241]: DEBUG oslo_concurrency.lockutils [req-c1150c9e-fcb6-405a-9066-267a1caba882 req-22993b61-ce61-4800-ada3-30fc4c52427d service nova] Acquiring lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.173147] env[63241]: DEBUG oslo_concurrency.lockutils [req-c1150c9e-fcb6-405a-9066-267a1caba882 req-22993b61-ce61-4800-ada3-30fc4c52427d service nova] Acquired lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.173147] env[63241]: DEBUG nova.network.neutron [req-c1150c9e-fcb6-405a-9066-267a1caba882 req-22993b61-ce61-4800-ada3-30fc4c52427d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1794.350519] env[63241]: DEBUG oslo_vmware.api [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1820949, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140775} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.351457] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1794.351794] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1794.352215] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1794.354016] env[63241]: INFO nova.compute.manager [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Took 5.28 seconds to destroy the instance on the hypervisor. 
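The detach flow recorded above (the RetrievePropertiesEx lookups, the "no consolidation needed" check, the ReconfigVM_Task reconfigurations task-1820947/task-1820948, then "Detached VMDK") follows the usual oslo.vmware invoke-and-wait pattern. A minimal sketch of that pattern only, assuming an established oslo_vmware.api.VMwareAPISession `session`, the VM's managed-object reference `vm_ref`, and the VirtualDisk `device` already looked up by the caller; this is illustrative, not the actual volumeops implementation:

def detach_disk(session, vm_ref, device):
    # suds client factory for building vSphere API spec objects
    client_factory = session.vim.client.factory

    # Reconfig spec that removes the disk device from the VM but leaves the
    # backing .vmdk (the Cinder volume) in place on the datastore.
    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    device_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'remove'
    device_change.device = device
    config_spec.deviceChange = [device_change]

    # ReconfigVM_Task returns a task moref; waiting on it is what produces
    # the "progress is 6%" ... "completed successfully" lines in the log.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)
    session.wait_for_task(task)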
[ 1794.354016] env[63241]: DEBUG oslo.service.loopingcall [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1794.354016] env[63241]: DEBUG nova.compute.manager [-] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1794.354016] env[63241]: DEBUG nova.network.neutron [-] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1794.403768] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820941, 'name': CreateVM_Task, 'duration_secs': 5.154008} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.405070] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1794.408715] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.408715] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.408715] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1794.408715] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26a393c5-49b4-4394-b6d6-9e0f82be0f3b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.412366] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1794.412366] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52cfaa68-4b85-bb1b-efbd-790c2cc9720c" [ 1794.412366] env[63241]: _type = "Task" [ 1794.412366] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.423308] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52cfaa68-4b85-bb1b-efbd-790c2cc9720c, 'name': SearchDatastore_Task} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.425886] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.426223] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1794.426512] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.426720] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.426949] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1794.427385] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1794.427657] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0a8d78e-fe3b-49f9-8ee9-cb84e83bc16d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.429672] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} 
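The "Acquiring lock" / "Acquired lock" / "Acquired external semaphore" / "Releasing lock" lines around the devstack-image-cache_base entries come from oslo.concurrency's lockutils, which serializes work on each cached image path so only one worker prepares a given image at a time. A minimal sketch of that locking pattern; the cache_path and fetch_image names are illustrative, not nova's actual signatures:

from oslo_concurrency import lockutils

def ensure_cached_image(cache_path, fetch_image):
    # lockutils.lock() emits the DEBUG lines seen above: "Acquiring lock ...",
    # "Acquired lock ..." (plus the external-semaphore line when external
    # locking is in effect), and "Releasing lock ..." when the block exits.
    with lockutils.lock(cache_path, external=True):
        # Other workers block here, then find the cached vmdk already present.
        fetch_image(cache_path)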
[ 1794.439973] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1794.440498] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1794.441670] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abdd36c5-a35d-4986-a739-3d243cfc7185 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.450721] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1794.450721] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523a1dcb-f5ab-1606-5648-d0dcd5d58b6f" [ 1794.450721] env[63241]: _type = "Task" [ 1794.450721] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.468812] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523a1dcb-f5ab-1606-5648-d0dcd5d58b6f, 'name': SearchDatastore_Task, 'duration_secs': 0.009779} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.470404] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d15cb624-6ca1-4ab1-95fc-a0ecc5277dd2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.478980] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1794.478980] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a68e3e-6779-6caf-d0b8-b28f001df3f5" [ 1794.478980] env[63241]: _type = "Task" [ 1794.478980] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.501238] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a68e3e-6779-6caf-d0b8-b28f001df3f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010851} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.501763] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.502197] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 91b65576-47be-4a92-a6fd-8380532c8e1d/91b65576-47be-4a92-a6fd-8380532c8e1d.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1794.502574] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d9cb112-85e8-4333-8064-5fd5d2cf0f65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.510659] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1794.510659] env[63241]: value = "task-1820950" [ 1794.510659] env[63241]: _type = "Task" [ 1794.510659] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.519370] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820950, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.639930] env[63241]: DEBUG nova.scheduler.client.report [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1794.852963] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.867093] env[63241]: DEBUG nova.compute.manager [req-3c7b01c1-d1a2-4f98-8fda-b4ffff38a600 req-287e5527-7c86-4626-8a1f-77c168349670 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Received event network-vif-plugged-95095173-ff26-4be0-88de-b44051605ee6 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1794.867810] env[63241]: DEBUG oslo_concurrency.lockutils [req-3c7b01c1-d1a2-4f98-8fda-b4ffff38a600 req-287e5527-7c86-4626-8a1f-77c168349670 service nova] Acquiring lock "12b99b2b-56f0-4ce9-8897-f429c2084f38-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.869177] env[63241]: DEBUG oslo_concurrency.lockutils [req-3c7b01c1-d1a2-4f98-8fda-b4ffff38a600 req-287e5527-7c86-4626-8a1f-77c168349670 service nova] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.869177] env[63241]: DEBUG oslo_concurrency.lockutils [req-3c7b01c1-d1a2-4f98-8fda-b4ffff38a600 req-287e5527-7c86-4626-8a1f-77c168349670 service nova] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.869177] env[63241]: DEBUG nova.compute.manager [req-3c7b01c1-d1a2-4f98-8fda-b4ffff38a600 req-287e5527-7c86-4626-8a1f-77c168349670 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] No waiting events found dispatching network-vif-plugged-95095173-ff26-4be0-88de-b44051605ee6 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1794.869177] env[63241]: WARNING nova.compute.manager [req-3c7b01c1-d1a2-4f98-8fda-b4ffff38a600 req-287e5527-7c86-4626-8a1f-77c168349670 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Received unexpected event network-vif-plugged-95095173-ff26-4be0-88de-b44051605ee6 for instance with vm_state building and task_state spawning. 
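The inventory reported for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b translates into allocatable capacity on the placement side as (total - reserved) * allocation_ratio, while max_unit only caps what a single allocation may claim (16 vCPUs, 65530 MB, 154 GB here). A quick worked check using the values copied from the log lines above:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)
# -> VCPU 192, MEMORY_MB 196078, DISK_GB 400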
[ 1794.893890] env[63241]: DEBUG nova.network.neutron [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Successfully updated port: 95095173-ff26-4be0-88de-b44051605ee6 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1794.941195] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1794.941612] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1794.941899] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1795.021460] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820950, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.118206] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a4299753-7d18-4ef8-87de-bd36d011ea4b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.263s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.147792] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.839s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.149417] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.268s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.149968] env[63241]: DEBUG nova.objects.instance [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'resources' on Instance uuid 01af6dc5-e0e7-4f8b-ad07-73af80c32577 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.182134] env[63241]: INFO nova.scheduler.client.report [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted allocations for instance 46ac69f3-375c-4b60-bc33-83ad8577c4fb [ 1795.186239] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "29b6caa8-a07c-494b-b776-b08affa45c87" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.186453] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.186624] env[63241]: DEBUG nova.compute.manager [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1795.188501] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba9019d-6e68-43ec-bce1-fe7ca82990e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.198078] env[63241]: DEBUG nova.compute.manager [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1795.198803] env[63241]: DEBUG nova.objects.instance [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.262962] env[63241]: DEBUG nova.network.neutron [-] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.330401] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.330401] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.388147] env[63241]: DEBUG nova.network.neutron [req-c1150c9e-fcb6-405a-9066-267a1caba882 
req-22993b61-ce61-4800-ada3-30fc4c52427d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updated VIF entry in instance network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1795.388546] env[63241]: DEBUG nova.network.neutron [req-c1150c9e-fcb6-405a-9066-267a1caba882 req-22993b61-ce61-4800-ada3-30fc4c52427d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.395754] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "refresh_cache-12b99b2b-56f0-4ce9-8897-f429c2084f38" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.395883] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired lock "refresh_cache-12b99b2b-56f0-4ce9-8897-f429c2084f38" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.396039] env[63241]: DEBUG nova.network.neutron [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1795.453164] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Skipping network cache update for instance because it is being deleted. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1795.453164] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Skipping network cache update for instance because it is Building. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1795.453164] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Skipping network cache update for instance because it is Building. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1795.453164] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Skipping network cache update for instance because it is Building. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1795.453164] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Skipping network cache update for instance because it is Building. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1795.471643] env[63241]: DEBUG nova.compute.manager [req-68d7665e-0c99-4740-9e76-bb5496587e81 req-b8726939-5f84-470b-b514-ac5c466b00c0 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Received event network-vif-plugged-1c88bbab-4bd7-4ea5-858c-317020381bac {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.471852] env[63241]: DEBUG oslo_concurrency.lockutils [req-68d7665e-0c99-4740-9e76-bb5496587e81 req-b8726939-5f84-470b-b514-ac5c466b00c0 service nova] Acquiring lock "49d350ff-4932-4759-a3fa-53274c484ae6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.472076] env[63241]: DEBUG oslo_concurrency.lockutils [req-68d7665e-0c99-4740-9e76-bb5496587e81 req-b8726939-5f84-470b-b514-ac5c466b00c0 service nova] Lock "49d350ff-4932-4759-a3fa-53274c484ae6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.472243] env[63241]: DEBUG oslo_concurrency.lockutils [req-68d7665e-0c99-4740-9e76-bb5496587e81 req-b8726939-5f84-470b-b514-ac5c466b00c0 service nova] Lock "49d350ff-4932-4759-a3fa-53274c484ae6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.472404] env[63241]: DEBUG nova.compute.manager [req-68d7665e-0c99-4740-9e76-bb5496587e81 req-b8726939-5f84-470b-b514-ac5c466b00c0 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] No waiting events found dispatching network-vif-plugged-1c88bbab-4bd7-4ea5-858c-317020381bac {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1795.472563] env[63241]: WARNING nova.compute.manager [req-68d7665e-0c99-4740-9e76-bb5496587e81 req-b8726939-5f84-470b-b514-ac5c466b00c0 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Received unexpected event 
network-vif-plugged-1c88bbab-4bd7-4ea5-858c-317020381bac for instance with vm_state building and task_state spawning. [ 1795.515936] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-94a604da-ad3d-415a-aa92-d648e3da803d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.516106] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-94a604da-ad3d-415a-aa92-d648e3da803d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.516255] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1795.516410] env[63241]: DEBUG nova.objects.instance [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lazy-loading 'info_cache' on Instance uuid 94a604da-ad3d-415a-aa92-d648e3da803d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.523642] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820950, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616539} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.523905] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 91b65576-47be-4a92-a6fd-8380532c8e1d/91b65576-47be-4a92-a6fd-8380532c8e1d.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1795.524174] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1795.524451] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75c23d25-6353-49d5-85fa-8c332f7b018e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.535960] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1795.535960] env[63241]: value = "task-1820951" [ 1795.535960] env[63241]: _type = "Task" [ 1795.535960] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.545971] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820951, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.651899] env[63241]: DEBUG nova.network.neutron [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Successfully updated port: 7133056f-eb77-48c4-b773-9923e2a62fc6 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1795.655051] env[63241]: DEBUG nova.objects.instance [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'numa_topology' on Instance uuid 01af6dc5-e0e7-4f8b-ad07-73af80c32577 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.691180] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6dad60df-fcd3-4c7a-b69e-3ba0804b8960 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "46ac69f3-375c-4b60-bc33-83ad8577c4fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.814s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.704193] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1795.704459] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b055c24-9c31-4440-9df7-2a299cf3038c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.713178] env[63241]: DEBUG oslo_vmware.api [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1795.713178] env[63241]: value = "task-1820952" [ 1795.713178] env[63241]: _type = "Task" [ 1795.713178] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.722840] env[63241]: DEBUG oslo_vmware.api [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820952, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.767879] env[63241]: INFO nova.compute.manager [-] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Took 1.41 seconds to deallocate network for instance. 
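The recurring "Task: {'id': task-..., 'name': ...} progress is N%" lines are produced by the oslo.vmware task poll loop (_poll_task at api.py:434), which re-reads TaskInfo until the task reaches a terminal state. A simplified sketch of that loop, assuming a hypothetical get_task_info(task_ref) helper that reads TaskInfo via the property collector; the real implementation lives in oslo_vmware.api:

import time

def wait_for_task(task_ref, get_task_info, interval=0.5):
    # Each pass of this loop corresponds to one "progress is N%" DEBUG line.
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(info.error.localizedMessage)
        time.sleep(interval)  # queued / running: wait and re-poll

Note also that the 1048576 passed to ExtendVirtualDisk_Task above is the flavor's root_gb=1 expressed in KiB (1 GiB = 1024 * 1024 KiB), which is why the root disk of instance 91b65576-47be-4a92-a6fd-8380532c8e1d is "extended" to that value right after the image copy.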
[ 1795.836349] env[63241]: DEBUG nova.network.neutron [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Successfully updated port: 1c88bbab-4bd7-4ea5-858c-317020381bac {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1795.836349] env[63241]: DEBUG nova.compute.manager [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1795.891236] env[63241]: DEBUG oslo_concurrency.lockutils [req-c1150c9e-fcb6-405a-9066-267a1caba882 req-22993b61-ce61-4800-ada3-30fc4c52427d service nova] Releasing lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.946092] env[63241]: DEBUG nova.network.neutron [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1796.045601] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820951, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.24259} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.045873] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1796.046648] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bde16e7-1f60-4e21-a65e-3a0eb36578dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.068794] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 91b65576-47be-4a92-a6fd-8380532c8e1d/91b65576-47be-4a92-a6fd-8380532c8e1d.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1796.072023] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0aac29c-dfff-4368-84bc-614372cac6fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.091395] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1796.091395] env[63241]: value = "task-1820953" [ 1796.091395] env[63241]: _type = "Task" [ 1796.091395] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.099695] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820953, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.137415] env[63241]: DEBUG nova.network.neutron [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Updating instance_info_cache with network_info: [{"id": "95095173-ff26-4be0-88de-b44051605ee6", "address": "fa:16:3e:95:7a:3f", "network": {"id": "1e0ff0ab-bb23-4187-abf3-c1d13c2971ac", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-634449129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e139fb67702e42d8a8b2401cc6be9303", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95095173-ff", "ovs_interfaceid": "95095173-ff26-4be0-88de-b44051605ee6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.155948] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "refresh_cache-d1abe122-0259-4f6e-b363-d7c0b1ae7a69" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.155948] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "refresh_cache-d1abe122-0259-4f6e-b363-d7c0b1ae7a69" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.155948] env[63241]: DEBUG nova.network.neutron [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1796.158143] env[63241]: DEBUG nova.objects.base [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Object Instance<01af6dc5-e0e7-4f8b-ad07-73af80c32577> lazy-loaded attributes: resources,numa_topology {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1796.203411] env[63241]: DEBUG nova.compute.manager [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Received event network-vif-plugged-7133056f-eb77-48c4-b773-9923e2a62fc6 {{(pid=63241) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1796.203591] env[63241]: DEBUG oslo_concurrency.lockutils [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] Acquiring lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.203821] env[63241]: DEBUG oslo_concurrency.lockutils [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.204014] env[63241]: DEBUG oslo_concurrency.lockutils [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.204233] env[63241]: DEBUG nova.compute.manager [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] No waiting events found dispatching network-vif-plugged-7133056f-eb77-48c4-b773-9923e2a62fc6 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1796.204394] env[63241]: WARNING nova.compute.manager [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Received unexpected event network-vif-plugged-7133056f-eb77-48c4-b773-9923e2a62fc6 for instance with vm_state building and task_state spawning. [ 1796.204626] env[63241]: DEBUG nova.compute.manager [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Received event network-changed-7133056f-eb77-48c4-b773-9923e2a62fc6 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1796.204970] env[63241]: DEBUG nova.compute.manager [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Refreshing instance network info cache due to event network-changed-7133056f-eb77-48c4-b773-9923e2a62fc6. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1796.204970] env[63241]: DEBUG oslo_concurrency.lockutils [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] Acquiring lock "refresh_cache-d1abe122-0259-4f6e-b363-d7c0b1ae7a69" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.226486] env[63241]: DEBUG oslo_vmware.api [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820952, 'name': PowerOffVM_Task, 'duration_secs': 0.205105} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.226744] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1796.227318] env[63241]: DEBUG nova.compute.manager [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1796.227763] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bc1d8e-ddab-418a-b835-0e7399424adf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.272828] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.340363] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.340552] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.340733] env[63241]: DEBUG nova.network.neutron [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1796.358774] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.402505] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8651f3-97f2-4a01-a8fb-524f5d4b8985 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.410309] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6d5825-8895-49fd-9d99-0077decf8370 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.443921] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8260a7-7e2c-4737-ac21-62c0471be295 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.451847] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6a8e78-67d2-4dd8-847d-73e414d811cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.466982] env[63241]: DEBUG nova.compute.provider_tree [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.606759] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820953, 'name': ReconfigVM_Task, 'duration_secs': 0.358124} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.607091] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 91b65576-47be-4a92-a6fd-8380532c8e1d/91b65576-47be-4a92-a6fd-8380532c8e1d.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1796.607790] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31d46d38-e3b9-4a84-bae2-f0a5ad2ffab3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.614383] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1796.614383] env[63241]: value = "task-1820954" [ 1796.614383] env[63241]: _type = "Task" [ 1796.614383] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.624583] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820954, 'name': Rename_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.640387] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Releasing lock "refresh_cache-12b99b2b-56f0-4ce9-8897-f429c2084f38" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.640797] env[63241]: DEBUG nova.compute.manager [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Instance network_info: |[{"id": "95095173-ff26-4be0-88de-b44051605ee6", "address": "fa:16:3e:95:7a:3f", "network": {"id": "1e0ff0ab-bb23-4187-abf3-c1d13c2971ac", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-634449129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e139fb67702e42d8a8b2401cc6be9303", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95095173-ff", "ovs_interfaceid": "95095173-ff26-4be0-88de-b44051605ee6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1796.641267] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:7a:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '95095173-ff26-4be0-88de-b44051605ee6', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1796.649749] env[63241]: DEBUG oslo.service.loopingcall [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1796.650069] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1796.650807] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aee086dc-099d-4a70-82b2-3c43edabc283 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.677864] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1796.677864] env[63241]: value = "task-1820955" [ 1796.677864] env[63241]: _type = "Task" [ 1796.677864] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.685542] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820955, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.704888] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "6055a56d-1e0d-47bc-930b-b62206a0263e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.705157] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.709896] env[63241]: DEBUG nova.network.neutron [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1796.742598] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9337bf55-4723-491a-9be3-fd265a814f2f tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.556s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.907423] env[63241]: DEBUG nova.network.neutron [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1796.945307] env[63241]: DEBUG nova.compute.manager [req-999b6924-fe5d-46cf-b56f-c8f026417a3d req-c3937ce7-6a29-44dc-b425-fdeb797a0727 service nova] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Received event network-vif-deleted-a6b9cb89-6a29-44b5-91b1-0591266c582b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1796.945518] env[63241]: DEBUG nova.compute.manager [req-999b6924-fe5d-46cf-b56f-c8f026417a3d req-c3937ce7-6a29-44dc-b425-fdeb797a0727 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Received event network-changed-95095173-ff26-4be0-88de-b44051605ee6 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1796.945677] env[63241]: DEBUG nova.compute.manager [req-999b6924-fe5d-46cf-b56f-c8f026417a3d req-c3937ce7-6a29-44dc-b425-fdeb797a0727 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Refreshing instance network info cache due to event network-changed-95095173-ff26-4be0-88de-b44051605ee6. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1796.945971] env[63241]: DEBUG oslo_concurrency.lockutils [req-999b6924-fe5d-46cf-b56f-c8f026417a3d req-c3937ce7-6a29-44dc-b425-fdeb797a0727 service nova] Acquiring lock "refresh_cache-12b99b2b-56f0-4ce9-8897-f429c2084f38" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.946292] env[63241]: DEBUG oslo_concurrency.lockutils [req-999b6924-fe5d-46cf-b56f-c8f026417a3d req-c3937ce7-6a29-44dc-b425-fdeb797a0727 service nova] Acquired lock "refresh_cache-12b99b2b-56f0-4ce9-8897-f429c2084f38" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.946495] env[63241]: DEBUG nova.network.neutron [req-999b6924-fe5d-46cf-b56f-c8f026417a3d req-c3937ce7-6a29-44dc-b425-fdeb797a0727 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Refreshing network info cache for port 95095173-ff26-4be0-88de-b44051605ee6 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1796.970989] env[63241]: DEBUG nova.scheduler.client.report [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1797.038070] env[63241]: DEBUG nova.network.neutron [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Updating instance_info_cache with network_info: [{"id": "7133056f-eb77-48c4-b773-9923e2a62fc6", "address": "fa:16:3e:47:6f:32", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7133056f-eb", "ovs_interfaceid": "7133056f-eb77-48c4-b773-9923e2a62fc6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.124763] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820954, 'name': Rename_Task, 'duration_secs': 0.267158} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.124987] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1797.125256] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62a903d7-867e-4497-927e-86b0b66496b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.131642] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1797.131642] env[63241]: value = "task-1820956" [ 1797.131642] env[63241]: _type = "Task" [ 1797.131642] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.142893] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820956, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.143767] env[63241]: DEBUG nova.network.neutron [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance_info_cache with network_info: [{"id": "1c88bbab-4bd7-4ea5-858c-317020381bac", "address": "fa:16:3e:b5:7a:04", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c88bbab-4b", "ovs_interfaceid": "1c88bbab-4bd7-4ea5-858c-317020381bac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.188472] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820955, 'name': CreateVM_Task, 'duration_secs': 0.410718} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.188652] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1797.189899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.189899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.190090] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1797.190443] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e68b7f78-af77-44d1-bce3-155415b218c1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.195294] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1797.195294] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5231781f-8c80-89e0-953c-d0fce6546eef" [ 1797.195294] env[63241]: _type = "Task" [ 1797.195294] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.203692] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5231781f-8c80-89e0-953c-d0fce6546eef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.208225] env[63241]: DEBUG nova.compute.manager [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1797.284790] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Updating instance_info_cache with network_info: [{"id": "c6bc0033-163b-4b6a-8577-877f59a975dc", "address": "fa:16:3e:61:79:2a", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6bc0033-16", "ovs_interfaceid": "c6bc0033-163b-4b6a-8577-877f59a975dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.476057] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.327s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.478615] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.883s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.482345] env[63241]: DEBUG nova.objects.instance [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1797.540874] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "refresh_cache-d1abe122-0259-4f6e-b363-d7c0b1ae7a69" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.541223] env[63241]: DEBUG nova.compute.manager [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Instance network_info: |[{"id": 
"7133056f-eb77-48c4-b773-9923e2a62fc6", "address": "fa:16:3e:47:6f:32", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7133056f-eb", "ovs_interfaceid": "7133056f-eb77-48c4-b773-9923e2a62fc6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1797.541525] env[63241]: DEBUG oslo_concurrency.lockutils [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] Acquired lock "refresh_cache-d1abe122-0259-4f6e-b363-d7c0b1ae7a69" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.541705] env[63241]: DEBUG nova.network.neutron [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Refreshing network info cache for port 7133056f-eb77-48c4-b773-9923e2a62fc6 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1797.543028] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:6f:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7133056f-eb77-48c4-b773-9923e2a62fc6', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1797.557430] env[63241]: DEBUG oslo.service.loopingcall [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1797.561126] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1797.561623] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd9e38d1-f691-46d5-b8ec-5109e0ee94c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.582854] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1797.582854] env[63241]: value = "task-1820957" [ 1797.582854] env[63241]: _type = "Task" [ 1797.582854] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.591635] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820957, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.642544] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820956, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.647140] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.647463] env[63241]: DEBUG nova.compute.manager [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Instance network_info: |[{"id": "1c88bbab-4bd7-4ea5-858c-317020381bac", "address": "fa:16:3e:b5:7a:04", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c88bbab-4b", "ovs_interfaceid": "1c88bbab-4bd7-4ea5-858c-317020381bac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1797.647884] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b533e6d8-945e-48d0-9484-4271c8a3138f 
tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:7a:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dacd109c-2442-41b8-b612-7ed3efbdaa94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c88bbab-4bd7-4ea5-858c-317020381bac', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1797.655976] env[63241]: DEBUG oslo.service.loopingcall [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1797.656264] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1797.656502] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e61709c9-0ca6-4b97-8030-ce71c2dd21af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.678411] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1797.678411] env[63241]: value = "task-1820958" [ 1797.678411] env[63241]: _type = "Task" [ 1797.678411] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.689464] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820958, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.703851] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5231781f-8c80-89e0-953c-d0fce6546eef, 'name': SearchDatastore_Task, 'duration_secs': 0.01012} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.704247] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.704495] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1797.704734] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.704879] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.705073] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1797.705341] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4f50ca1-af28-4d64-ba20-79faffa9ae96 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.713319] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1797.713500] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1797.718438] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dab6b11a-ea04-4a52-9ad6-d1fbac856b8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.730509] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1797.730509] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527d3d73-2b0c-9b0b-8584-dc335f6b4a96" [ 1797.730509] env[63241]: _type = "Task" [ 1797.730509] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.740021] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527d3d73-2b0c-9b0b-8584-dc335f6b4a96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.751294] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.788224] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-94a604da-ad3d-415a-aa92-d648e3da803d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.788456] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1797.788668] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.788825] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.788971] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.789139] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1797.789286] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.789477] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1797.995361] env[63241]: DEBUG oslo_concurrency.lockutils [None req-73050eda-1ed2-4b07-ac6f-5fb05362c097 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 35.846s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.996328] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 9.718s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.996579] env[63241]: INFO nova.compute.manager [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Unshelving [ 1798.094410] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820957, 'name': CreateVM_Task, 'duration_secs': 0.400518} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.094753] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1798.095675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.095957] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.096484] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1798.096912] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-263c0dda-ed08-44b4-813d-95bcd1cba70c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.103361] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1798.103361] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52397203-a784-733f-9b52-abb9c455c704" [ 1798.103361] env[63241]: _type = "Task" [ 1798.103361] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.112036] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52397203-a784-733f-9b52-abb9c455c704, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.143554] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820956, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.191219] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820958, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.242541] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527d3d73-2b0c-9b0b-8584-dc335f6b4a96, 'name': SearchDatastore_Task, 'duration_secs': 0.008628} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.242541] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04501a90-7ca8-482a-8440-7acafe788a87 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.248230] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1798.248230] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5268d76c-b326-bd8d-dea0-b1c5c2530d5e" [ 1798.248230] env[63241]: _type = "Task" [ 1798.248230] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.260135] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5268d76c-b326-bd8d-dea0-b1c5c2530d5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.286453] env[63241]: DEBUG nova.network.neutron [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Updated VIF entry in instance network info cache for port 7133056f-eb77-48c4-b773-9923e2a62fc6. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1798.286453] env[63241]: DEBUG nova.network.neutron [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Updating instance_info_cache with network_info: [{"id": "7133056f-eb77-48c4-b773-9923e2a62fc6", "address": "fa:16:3e:47:6f:32", "network": {"id": "c8850653-f6ec-4211-9dde-ba757cf03a7a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1851174219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98582d7ee18145318ee5a05cac36781e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7133056f-eb", "ovs_interfaceid": "7133056f-eb77-48c4-b773-9923e2a62fc6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.295393] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Getting list of instances from cluster (obj){ [ 1798.295393] env[63241]: value = "domain-c8" [ 1798.295393] env[63241]: _type = "ClusterComputeResource" [ 1798.295393] env[63241]: } {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1798.296630] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ddb027-4ff0-49aa-9e1e-fd0157cdd9cc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.302289] env[63241]: DEBUG nova.network.neutron [req-999b6924-fe5d-46cf-b56f-c8f026417a3d req-c3937ce7-6a29-44dc-b425-fdeb797a0727 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Updated VIF entry in instance network info cache for port 95095173-ff26-4be0-88de-b44051605ee6. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1798.302708] env[63241]: DEBUG nova.network.neutron [req-999b6924-fe5d-46cf-b56f-c8f026417a3d req-c3937ce7-6a29-44dc-b425-fdeb797a0727 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Updating instance_info_cache with network_info: [{"id": "95095173-ff26-4be0-88de-b44051605ee6", "address": "fa:16:3e:95:7a:3f", "network": {"id": "1e0ff0ab-bb23-4187-abf3-c1d13c2971ac", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-634449129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e139fb67702e42d8a8b2401cc6be9303", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95095173-ff", "ovs_interfaceid": "95095173-ff26-4be0-88de-b44051605ee6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.326998] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Got total of 14 instances {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1798.327440] env[63241]: WARNING nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] While synchronizing instance power states, found 15 instances in the database and 14 instances on the hypervisor. 
[ 1798.327440] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 94a604da-ad3d-415a-aa92-d648e3da803d {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.327652] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 31e84206-e583-4610-969e-2ccae2d0b206 {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.327834] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid fb5d60fa-fa13-44a1-8291-4645761a0c80 {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.327990] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 14af9f82-525e-453c-8dc5-ef5b13c67ee4 {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.328195] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.328349] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 1e172f73-972e-4401-b358-512f7e03b27f {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.328495] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.328680] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid e62f49f0-370d-4b5d-ab43-72e0e6238432 {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.329046] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid e28ba013-0bc5-4edc-858d-674980bc8742 {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.329046] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 037f539f-1bf1-4897-81b3-08c377b92211 {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.329118] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 6b96988b-cc79-41d7-a17d-277ae5aeb4dc {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.329326] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 91b65576-47be-4a92-a6fd-8380532c8e1d {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.329499] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 12b99b2b-56f0-4ce9-8897-f429c2084f38 {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.329650] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 49d350ff-4932-4759-a3fa-53274c484ae6 {{(pid=63241) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.329818] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid d1abe122-0259-4f6e-b363-d7c0b1ae7a69 {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1798.330516] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "94a604da-ad3d-415a-aa92-d648e3da803d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.330789] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "94a604da-ad3d-415a-aa92-d648e3da803d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.331339] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "31e84206-e583-4610-969e-2ccae2d0b206" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.331339] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "31e84206-e583-4610-969e-2ccae2d0b206" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.331541] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "fb5d60fa-fa13-44a1-8291-4645761a0c80" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.331741] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.331994] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.332202] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.332501] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock 
"29b6caa8-a07c-494b-b776-b08affa45c87" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.332682] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.332937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "1e172f73-972e-4401-b358-512f7e03b27f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.333128] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "1e172f73-972e-4401-b358-512f7e03b27f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.333394] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.333628] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.333887] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.334169] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "e28ba013-0bc5-4edc-858d-674980bc8742" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.336616] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.002s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.337069] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "037f539f-1bf1-4897-81b3-08c377b92211" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.337547] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "037f539f-1bf1-4897-81b3-08c377b92211" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.337848] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.338127] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.338331] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "91b65576-47be-4a92-a6fd-8380532c8e1d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.338586] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "12b99b2b-56f0-4ce9-8897-f429c2084f38" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.338863] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "49d350ff-4932-4759-a3fa-53274c484ae6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.339109] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.339337] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1798.339499] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1798.340429] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14e9afe-e297-44e8-b899-781335469d5b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.343992] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51826c81-66c7-48c5-bf58-219b420f58a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.348157] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445695c3-ae99-40f6-9f39-1d4707351023 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.351280] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8c080e-5487-40b0-840c-c742c26335ce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.355021] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69273f9e-7781-422a-8879-5a98e12ee708 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.358304] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08a6c42-eb00-478a-8563-1e76ad310cd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.361993] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddff6b83-73e0-45a7-b94f-8ce3b119a3dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.365625] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74751250-bc85-4f14-bc2d-e4fa34b18705 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.369182] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a78300-8df3-448b-a295-c1937c32c488 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.372545] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b849a0-4b97-4e41-847f-18c5f0ed190d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.375491] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1798.412049] env[63241]: WARNING oslo_messaging._drivers.amqpdriver [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1798.499833] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b1040751-0135-4e41-82fd-b7d620efc704 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.502927] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.650s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.503121] env[63241]: DEBUG nova.objects.instance [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lazy-loading 'pci_requests' on Instance uuid 0e4a3b3a-4464-404f-9154-1ab6f97ae951 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1798.615558] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52397203-a784-733f-9b52-abb9c455c704, 'name': SearchDatastore_Task, 'duration_secs': 0.010242} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.615859] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.616109] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1798.616319] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.642409] env[63241]: DEBUG oslo_vmware.api [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820956, 'name': PowerOnVM_Task, 'duration_secs': 1.153971} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.642653] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1798.642859] env[63241]: INFO nova.compute.manager [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Took 12.71 seconds to spawn the instance on the hypervisor. [ 1798.643056] env[63241]: DEBUG nova.compute.manager [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1798.643803] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f1e3f6-9b82-4ea9-8b88-c0691e3cff00 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.653107] env[63241]: DEBUG nova.objects.instance [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1798.689723] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820958, 'name': CreateVM_Task, 'duration_secs': 0.531729} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.689884] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1798.690608] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.690780] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.691103] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1798.691344] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d10cb3ea-be8f-4b67-b5fb-8574ef5d43e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.695970] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1798.695970] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ae33d4-71ce-b20c-9c68-0211adc11db6" [ 1798.695970] env[63241]: _type = "Task" [ 1798.695970] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.703774] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ae33d4-71ce-b20c-9c68-0211adc11db6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.759088] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5268d76c-b326-bd8d-dea0-b1c5c2530d5e, 'name': SearchDatastore_Task, 'duration_secs': 0.010592} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.759411] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.759666] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 12b99b2b-56f0-4ce9-8897-f429c2084f38/12b99b2b-56f0-4ce9-8897-f429c2084f38.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1798.759935] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.760143] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1798.760354] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7312e58-72da-4bf0-83c6-1a68f9317395 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.762189] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-950f37af-22f4-40e6-99d4-91f954e6b457 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.769448] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1798.769448] env[63241]: value = "task-1820959" [ 1798.769448] env[63241]: _type = "Task" [ 1798.769448] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.773255] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1798.773255] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1798.773912] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8226c174-12d3-49c7-9ad3-54806ff6bc44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.780456] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820959, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.781514] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1798.781514] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527045e3-1317-a64e-33d0-f9c17f7a1f2f" [ 1798.781514] env[63241]: _type = "Task" [ 1798.781514] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.788442] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527045e3-1317-a64e-33d0-f9c17f7a1f2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.792940] env[63241]: DEBUG oslo_concurrency.lockutils [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] Releasing lock "refresh_cache-d1abe122-0259-4f6e-b363-d7c0b1ae7a69" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.793197] env[63241]: DEBUG nova.compute.manager [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Received event network-changed-1c88bbab-4bd7-4ea5-858c-317020381bac {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1798.793411] env[63241]: DEBUG nova.compute.manager [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Refreshing instance network info cache due to event network-changed-1c88bbab-4bd7-4ea5-858c-317020381bac. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1798.793730] env[63241]: DEBUG oslo_concurrency.lockutils [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] Acquiring lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.793898] env[63241]: DEBUG oslo_concurrency.lockutils [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] Acquired lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.794080] env[63241]: DEBUG nova.network.neutron [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Refreshing network info cache for port 1c88bbab-4bd7-4ea5-858c-317020381bac {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1798.805017] env[63241]: DEBUG oslo_concurrency.lockutils [req-999b6924-fe5d-46cf-b56f-c8f026417a3d req-c3937ce7-6a29-44dc-b425-fdeb797a0727 service nova] Releasing lock "refresh_cache-12b99b2b-56f0-4ce9-8897-f429c2084f38" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.890989] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.919873] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.588s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.920229] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.588s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.920734] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "94a604da-ad3d-415a-aa92-d648e3da803d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.590s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.928497] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.594s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.928857] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.592s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.929200] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "037f539f-1bf1-4897-81b3-08c377b92211" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.592s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.934230] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "1e172f73-972e-4401-b358-512f7e03b27f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.601s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.934698] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.597s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.935281] env[63241]: INFO nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] During sync_power_state the instance has a pending task (powering-on). Skip. [ 1798.935577] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.603s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.935978] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "31e84206-e583-4610-969e-2ccae2d0b206" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.605s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.008737] env[63241]: DEBUG nova.objects.instance [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lazy-loading 'numa_topology' on Instance uuid 0e4a3b3a-4464-404f-9154-1ab6f97ae951 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1799.010937] env[63241]: DEBUG nova.compute.utils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1799.162561] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.162759] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.162955] env[63241]: DEBUG nova.network.neutron [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1799.163165] env[63241]: DEBUG nova.objects.instance [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'info_cache' on Instance uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1799.166458] env[63241]: INFO nova.compute.manager [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Took 19.95 seconds to build instance. [ 1799.206894] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ae33d4-71ce-b20c-9c68-0211adc11db6, 'name': SearchDatastore_Task, 'duration_secs': 0.009554} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.207217] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.207460] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1799.207998] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.279725] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820959, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457381} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.280046] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 12b99b2b-56f0-4ce9-8897-f429c2084f38/12b99b2b-56f0-4ce9-8897-f429c2084f38.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1799.280810] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1799.280810] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79078481-e6c5-4535-8100-ad26105a8e50 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.290948] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527045e3-1317-a64e-33d0-f9c17f7a1f2f, 'name': SearchDatastore_Task, 'duration_secs': 0.009273} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.292770] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1799.292770] env[63241]: value = "task-1820960" [ 1799.292770] env[63241]: _type = "Task" [ 1799.292770] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.292954] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7883c61a-0359-46ff-a541-efd07622781e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.301906] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1799.301906] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52679b47-93b7-bdc6-b152-199c12246dbd" [ 1799.301906] env[63241]: _type = "Task" [ 1799.301906] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.308375] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820960, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.313399] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52679b47-93b7-bdc6-b152-199c12246dbd, 'name': SearchDatastore_Task, 'duration_secs': 0.009887} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.313642] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.313897] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] d1abe122-0259-4f6e-b363-d7c0b1ae7a69/d1abe122-0259-4f6e-b363-d7c0b1ae7a69.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1799.314190] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.314384] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1799.314598] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e697c1be-3194-4d20-b121-8d39e9ad29c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.318243] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-639132b0-7a74-4b17-a70a-5535099d4e07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.325072] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1799.325072] env[63241]: value = "task-1820961" [ 1799.325072] env[63241]: _type = "Task" [ 1799.325072] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.328965] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1799.329165] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1799.330523] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-728dd213-a96a-4f5c-b7bb-dc3efa417083 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.337320] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820961, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.340161] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1799.340161] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522b42ec-177c-2ef3-1545-89db78d9d25d" [ 1799.340161] env[63241]: _type = "Task" [ 1799.340161] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.348197] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522b42ec-177c-2ef3-1545-89db78d9d25d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.513236] env[63241]: INFO nova.compute.claims [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1799.516584] env[63241]: INFO nova.virt.block_device [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Booting with volume 9d900cbe-b561-4b8b-b228-5471bffb1998 at /dev/sdb [ 1799.566564] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec8e8ae8-e320-4f2b-b01f-4fc643909bfd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.581777] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159084f9-cb8a-4831-b56c-f3f19d5b8660 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.618189] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64fe8bec-6d33-48e6-9fe8-d6b2db8a3d1a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.627447] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e348e5f-e424-4a25-a71e-cb559fcb58d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.665104] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7356868c-0b29-49ee-8a65-1350898b97eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.668785] env[63241]: DEBUG nova.objects.base [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Object Instance<29b6caa8-a07c-494b-b776-b08affa45c87> lazy-loaded attributes: flavor,info_cache {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1799.670454] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4c9b41f3-f702-4855-a43f-70b9739c32df tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.459s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.671359] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.333s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.671730] env[63241]: INFO nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] During sync_power_state the instance has a pending task 
(spawning). Skip. [ 1799.671730] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.675957] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45828845-b53a-4c68-b146-747924beb363 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.691107] env[63241]: DEBUG nova.virt.block_device [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating existing volume attachment record: 195e56e2-8026-4d35-a787-5b92f71b13de {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1799.754486] env[63241]: DEBUG nova.network.neutron [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updated VIF entry in instance network info cache for port 1c88bbab-4bd7-4ea5-858c-317020381bac. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1799.754881] env[63241]: DEBUG nova.network.neutron [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance_info_cache with network_info: [{"id": "1c88bbab-4bd7-4ea5-858c-317020381bac", "address": "fa:16:3e:b5:7a:04", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c88bbab-4b", "ovs_interfaceid": "1c88bbab-4bd7-4ea5-858c-317020381bac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1799.803934] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820960, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073135} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.804702] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1799.805106] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3817df47-f199-4c3b-8a1f-caf172e8219c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.831563] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 12b99b2b-56f0-4ce9-8897-f429c2084f38/12b99b2b-56f0-4ce9-8897-f429c2084f38.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1799.831563] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c45a2380-2844-4552-a8e2-bf2e33231bfe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.857711] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820961, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.860479] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1799.860479] env[63241]: value = "task-1820963" [ 1799.860479] env[63241]: _type = "Task" [ 1799.860479] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.860657] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522b42ec-177c-2ef3-1545-89db78d9d25d, 'name': SearchDatastore_Task, 'duration_secs': 0.009353} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.863905] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aa47f48-e2e2-4bf0-ba56-b871eb489502 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.875185] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820963, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.876553] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1799.876553] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52233668-80b7-3569-2f73-1b34d8aeb58a" [ 1799.876553] env[63241]: _type = "Task" [ 1799.876553] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.884605] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52233668-80b7-3569-2f73-1b34d8aeb58a, 'name': SearchDatastore_Task, 'duration_secs': 0.009064} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.884846] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.885112] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 49d350ff-4932-4759-a3fa-53274c484ae6/49d350ff-4932-4759-a3fa-53274c484ae6.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1799.885363] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b54d8814-d77f-4d37-91fd-73a2926d630a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.892446] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1799.892446] env[63241]: value = "task-1820965" [ 1799.892446] env[63241]: _type = "Task" [ 1799.892446] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.900420] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820965, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.257528] env[63241]: DEBUG oslo_concurrency.lockutils [req-92ae5029-2437-4bb0-b987-8837d59e716d req-f3e01271-127d-4ddb-b5e9-ea2fea58d678 service nova] Releasing lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.341194] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820961, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.370889] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.404175] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820965, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.493549] env[63241]: DEBUG nova.network.neutron [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Updating instance_info_cache with network_info: [{"id": "7b46f450-f9df-492c-bc52-8760f14afb90", "address": "fa:16:3e:e6:47:b4", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b46f450-f9", "ovs_interfaceid": "7b46f450-f9df-492c-bc52-8760f14afb90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.724394] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquiring lock "91b65576-47be-4a92-a6fd-8380532c8e1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.724667] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.724741] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquiring lock "91b65576-47be-4a92-a6fd-8380532c8e1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.724878] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.725561] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.729256] env[63241]: INFO nova.compute.manager [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Terminating instance [ 1800.731012] env[63241]: DEBUG nova.compute.manager [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1800.731240] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1800.732087] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b984d1-8273-41c5-85ea-5ddc7fdae8ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.739901] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1800.742150] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2953dc4f-0cbb-4a23-9670-a6852845022d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.748965] env[63241]: DEBUG oslo_vmware.api [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1800.748965] env[63241]: value = "task-1820967" [ 1800.748965] env[63241]: _type = "Task" [ 1800.748965] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.757881] env[63241]: DEBUG oslo_vmware.api [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820967, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.766345] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650914a3-9b1e-40ff-a7a2-d246312136aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.774274] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb285062-c7bf-4a35-9b34-d1eaac324f57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.805939] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3c3b9e-90a3-4599-b535-66b97edab837 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.814014] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3b2230-e806-4cec-99c6-8e7d22d1cb83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.827816] env[63241]: DEBUG nova.compute.provider_tree [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1800.840207] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820961, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.432709} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.840514] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] d1abe122-0259-4f6e-b363-d7c0b1ae7a69/d1abe122-0259-4f6e-b363-d7c0b1ae7a69.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1800.840755] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1800.841030] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49fccc16-3f42-4ea7-aa4e-7d061cf94fd5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.847738] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1800.847738] env[63241]: value = "task-1820968" [ 1800.847738] env[63241]: _type = "Task" [ 1800.847738] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.856831] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820968, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.875142] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.905053] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820965, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.996386] env[63241]: DEBUG oslo_concurrency.lockutils [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Releasing lock "refresh_cache-29b6caa8-a07c-494b-b776-b08affa45c87" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.259205] env[63241]: DEBUG oslo_vmware.api [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820967, 'name': PowerOffVM_Task, 'duration_secs': 0.223774} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.259484] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1801.259655] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1801.259906] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e346c47c-bd78-4c3a-9abb-c27a2105d36a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.330972] env[63241]: DEBUG nova.scheduler.client.report [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1801.358453] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820968, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099559} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.358700] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1801.359470] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889748e4-53e9-40ab-a3fe-a99cb6634650 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.383832] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] d1abe122-0259-4f6e-b363-d7c0b1ae7a69/d1abe122-0259-4f6e-b363-d7c0b1ae7a69.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1801.384491] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9864629e-2fbb-4998-bcb6-03d94bc427d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.403342] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820963, 'name': ReconfigVM_Task, 'duration_secs': 1.406439} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.407673] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 12b99b2b-56f0-4ce9-8897-f429c2084f38/12b99b2b-56f0-4ce9-8897-f429c2084f38.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1801.408405] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1801.408595] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1801.408771] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Deleting the datastore file [datastore1] 91b65576-47be-4a92-a6fd-8380532c8e1d {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1801.409011] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14e46cc7-e89f-453c-a012-c1bca79c4a04 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.410509] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecbfd44e-784c-4615-b0df-55e70736f59c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.413509] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1801.413509] env[63241]: value = "task-1820970" [ 1801.413509] env[63241]: _type = "Task" [ 1801.413509] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.420937] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820965, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.25588} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.422462] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 49d350ff-4932-4759-a3fa-53274c484ae6/49d350ff-4932-4759-a3fa-53274c484ae6.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1801.422684] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1801.422995] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1801.422995] env[63241]: value = "task-1820971" [ 1801.422995] env[63241]: _type = "Task" [ 1801.422995] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.423248] env[63241]: DEBUG oslo_vmware.api [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for the task: (returnval){ [ 1801.423248] env[63241]: value = "task-1820972" [ 1801.423248] env[63241]: _type = "Task" [ 1801.423248] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.423431] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c168c4b-0342-4fd9-a514-c2f88bc2b8fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.433865] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820970, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.439987] env[63241]: DEBUG oslo_vmware.api [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820972, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.443872] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820971, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.444196] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1801.444196] env[63241]: value = "task-1820973" [ 1801.444196] env[63241]: _type = "Task" [ 1801.444196] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.453808] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820973, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.500212] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1801.500968] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5bb1c95-2b17-45d4-80d4-901190edbfd9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.510062] env[63241]: DEBUG oslo_vmware.api [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1801.510062] env[63241]: value = "task-1820974" [ 1801.510062] env[63241]: _type = "Task" [ 1801.510062] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.519197] env[63241]: DEBUG oslo_vmware.api [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820974, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.836751] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.334s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.839244] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.566s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.839488] env[63241]: DEBUG nova.objects.instance [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'resources' on Instance uuid b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1801.869968] env[63241]: INFO nova.network.neutron [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating port 24131a23-55e1-4bd6-8813-5768da05438f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1801.924660] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820970, 'name': ReconfigVM_Task, 'duration_secs': 0.360408} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.925037] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Reconfigured VM instance instance-00000063 to attach disk [datastore1] d1abe122-0259-4f6e-b363-d7c0b1ae7a69/d1abe122-0259-4f6e-b363-d7c0b1ae7a69.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1801.925860] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7acb4ec3-08da-4343-8e47-60a4a693c472 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.941744] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820971, 'name': Rename_Task, 'duration_secs': 0.155741} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.946607] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1801.948553] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1801.948553] env[63241]: value = "task-1820975" [ 1801.948553] env[63241]: _type = "Task" [ 1801.948553] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.948553] env[63241]: DEBUG oslo_vmware.api [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Task: {'id': task-1820972, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15752} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.948553] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6b25ccb-8ad6-479e-a18e-963303c9f1c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.952754] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1801.956199] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1801.956199] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1801.956199] env[63241]: INFO nova.compute.manager [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1801.956199] env[63241]: DEBUG oslo.service.loopingcall [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1801.958249] env[63241]: DEBUG nova.compute.manager [-] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1801.958388] env[63241]: DEBUG nova.network.neutron [-] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1801.966093] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820973, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080518} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.971990] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1801.972531] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820975, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.972963] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1801.972963] env[63241]: value = "task-1820976" [ 1801.972963] env[63241]: _type = "Task" [ 1801.972963] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.974122] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5195c26-bddb-461b-8b27-50d1e2e14715 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.987193] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820976, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.007772] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 49d350ff-4932-4759-a3fa-53274c484ae6/49d350ff-4932-4759-a3fa-53274c484ae6.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1802.008087] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fba6e3a6-a8f3-43c7-b7b2-7b344f40cef5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.037104] env[63241]: DEBUG oslo_vmware.api [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1820974, 'name': PowerOnVM_Task, 'duration_secs': 0.474616} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.037428] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1802.037428] env[63241]: value = "task-1820978" [ 1802.037428] env[63241]: _type = "Task" [ 1802.037428] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.037674] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1802.037859] env[63241]: DEBUG nova.compute.manager [None req-61d2c219-3529-4e1a-93e4-f38858795651 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1802.038790] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d86534-ce64-4559-a0ad-323ba6bdb2c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.050659] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820978, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.056927] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "1e172f73-972e-4401-b358-512f7e03b27f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.057177] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.460548] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820975, 'name': Rename_Task, 'duration_secs': 0.180453} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.463104] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1802.463649] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a155fc1-1b95-4870-9fe6-5e2443a84bad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.472254] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1802.472254] env[63241]: value = "task-1820979" [ 1802.472254] env[63241]: _type = "Task" [ 1802.472254] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.484921] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820979, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.492556] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820976, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.533678] env[63241]: DEBUG nova.compute.manager [req-52833854-d258-467f-9b5c-dcc3065ceec6 req-85aaa2a3-60e9-4a63-8b8a-2cfd486380a3 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Received event network-vif-deleted-fd9d90f6-0bb7-4052-9f8e-2ad931e916ed {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1802.534018] env[63241]: INFO nova.compute.manager [req-52833854-d258-467f-9b5c-dcc3065ceec6 req-85aaa2a3-60e9-4a63-8b8a-2cfd486380a3 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Neutron deleted interface fd9d90f6-0bb7-4052-9f8e-2ad931e916ed; detaching it from the instance and deleting it from the info cache [ 1802.534231] env[63241]: DEBUG nova.network.neutron [req-52833854-d258-467f-9b5c-dcc3065ceec6 req-85aaa2a3-60e9-4a63-8b8a-2cfd486380a3 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.549443] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820978, 'name': ReconfigVM_Task, 'duration_secs': 0.333798} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.549815] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 49d350ff-4932-4759-a3fa-53274c484ae6/49d350ff-4932-4759-a3fa-53274c484ae6.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1802.552775] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1466ebcb-7862-47be-9567-f41a779db568 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.560631] env[63241]: DEBUG nova.compute.utils [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1802.563419] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1802.563419] env[63241]: value = "task-1820980" [ 1802.563419] env[63241]: _type = "Task" [ 1802.563419] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.578813] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820980, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.605947] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcf1e2c-147f-44f9-ae3d-0e7f2eddf124 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.614262] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09848e95-778f-4763-8471-a9afc844108a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.647897] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b873671-c779-4b32-be35-9686b98e5b1b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.656071] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287fc1b8-afde-4533-a6a8-13d88aaea2e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.671780] env[63241]: DEBUG nova.compute.provider_tree [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.926694] env[63241]: DEBUG nova.network.neutron [-] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.991522] env[63241]: DEBUG oslo_vmware.api [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820979, 'name': PowerOnVM_Task, 'duration_secs': 0.507038} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.994121] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1802.994342] env[63241]: INFO nova.compute.manager [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Took 10.04 seconds to spawn the instance on the hypervisor. [ 1802.994524] env[63241]: DEBUG nova.compute.manager [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1802.994808] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820976, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.995552] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7825e00e-7d44-4bfa-9384-d797a9cf5b53 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.041246] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f92be1b5-839f-4eb3-a064-f9577147e549 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.052212] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7d647e-061c-4617-8bd2-96b5bd8cd477 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.064793] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.079078] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820980, 'name': Rename_Task, 'duration_secs': 0.209634} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.080101] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1803.080337] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe1e3ee5-1822-4def-a2aa-9864f69547f8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.094728] env[63241]: DEBUG nova.compute.manager [req-52833854-d258-467f-9b5c-dcc3065ceec6 req-85aaa2a3-60e9-4a63-8b8a-2cfd486380a3 service nova] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Detach interface failed, port_id=fd9d90f6-0bb7-4052-9f8e-2ad931e916ed, reason: Instance 91b65576-47be-4a92-a6fd-8380532c8e1d could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1803.095781] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1803.095781] env[63241]: value = "task-1820981" [ 1803.095781] env[63241]: _type = "Task" [ 1803.095781] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.104566] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820981, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.175439] env[63241]: DEBUG nova.scheduler.client.report [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1803.429413] env[63241]: INFO nova.compute.manager [-] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Took 1.47 seconds to deallocate network for instance. [ 1803.496580] env[63241]: DEBUG oslo_vmware.api [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820976, 'name': PowerOnVM_Task, 'duration_secs': 1.106146} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.496960] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1803.497304] env[63241]: INFO nova.compute.manager [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Took 15.20 seconds to spawn the instance on the hypervisor. [ 1803.497585] env[63241]: DEBUG nova.compute.manager [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1803.498766] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48025115-175b-4ce4-aabb-43e255ea9534 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.524859] env[63241]: INFO nova.compute.manager [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Took 21.90 seconds to build instance. 
[ 1803.607138] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820981, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.681591] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.683773] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.325s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.685433] env[63241]: INFO nova.compute.claims [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1803.710985] env[63241]: INFO nova.scheduler.client.report [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted allocations for instance b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e [ 1803.855234] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.855486] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.855709] env[63241]: DEBUG nova.network.neutron [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1803.936558] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.030361] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2849d5a4-113f-4e5a-8c51-2163e06eca47 tempest-ServerDiskConfigTestJSON-1298437658 
tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.429s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.030361] env[63241]: INFO nova.compute.manager [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Took 24.39 seconds to build instance. [ 1804.030676] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.691s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.030676] env[63241]: INFO nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] During sync_power_state the instance has a pending task (spawning). Skip. [ 1804.030882] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.107791] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820981, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.128420] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "1e172f73-972e-4401-b358-512f7e03b27f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.128739] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.128975] env[63241]: INFO nova.compute.manager [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Attaching volume 8530001a-3c79-454f-a061-a6f03dce1921 to /dev/sdb [ 1804.167905] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b570fb-dcbd-4cde-ac68-d53d978588ce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.176843] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7eb1ce-0333-457b-946a-896011f62a99 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.195050] env[63241]: DEBUG nova.virt.block_device [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating existing volume attachment record: 1ac5812d-69b2-4f12-967c-db614487c019 {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1804.220766] env[63241]: DEBUG oslo_concurrency.lockutils [None req-dc3a0b1a-ec8f-4652-99c8-06a1deebec3a tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.150s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.221420] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.888s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.221911] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b22af41d-50db-4ce1-950c-e254c57228cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.234151] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9b82c7e2-2fc7-4973-9f3b-2c8d66408c42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.179994] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9823975e-ec48-40de-bc24-ed0444edde7f tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.547s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.185967] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.964s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.192537] env[63241]: DEBUG nova.compute.manager [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received event network-vif-plugged-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1805.193024] env[63241]: DEBUG oslo_concurrency.lockutils [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] Acquiring lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.193255] env[63241]: DEBUG oslo_concurrency.lockutils [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.193421] env[63241]: DEBUG oslo_concurrency.lockutils [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.193607] env[63241]: DEBUG nova.compute.manager [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] No waiting events found dispatching network-vif-plugged-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1805.193769] env[63241]: WARNING nova.compute.manager [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received unexpected event network-vif-plugged-24131a23-55e1-4bd6-8813-5768da05438f for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1805.193947] env[63241]: DEBUG nova.compute.manager [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received event network-changed-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1805.194142] env[63241]: DEBUG nova.compute.manager [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Refreshing instance network info cache due to event network-changed-24131a23-55e1-4bd6-8813-5768da05438f. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1805.194313] env[63241]: DEBUG oslo_concurrency.lockutils [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] Acquiring lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.194506] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.856s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.194690] env[63241]: INFO nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] During sync_power_state the instance has a pending task (spawning). Skip. [ 1805.194853] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.201156] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.201156] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.209982] env[63241]: DEBUG oslo_vmware.api [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1820981, 'name': PowerOnVM_Task, 'duration_secs': 1.071961} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.211321] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1805.211474] env[63241]: INFO nova.compute.manager [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Took 14.59 seconds to spawn the instance on the hypervisor. [ 1805.211657] env[63241]: DEBUG nova.compute.manager [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1805.212525] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de474e79-0794-4574-af5c-6e786bde4211 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.428118] env[63241]: DEBUG nova.network.neutron [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating instance_info_cache with network_info: [{"id": "24131a23-55e1-4bd6-8813-5768da05438f", "address": "fa:16:3e:fa:8e:d4", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24131a23-55", "ovs_interfaceid": "24131a23-55e1-4bd6-8813-5768da05438f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.470096] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e1b18d-14e2-433a-92ee-dd7d568f1214 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.479989] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d106a2-688c-455f-b0ec-c6a8204df034 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.516538] env[63241]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4069383-76c3-4630-9ba5-2e412e3938b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.525496] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd9ac5d-5132-4178-9a30-2695e9fb0d79 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.542582] env[63241]: DEBUG nova.compute.provider_tree [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1805.692879] env[63241]: DEBUG nova.compute.manager [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1805.693971] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b931fc-d004-47a2-9344-ea3125f1e40f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.703719] env[63241]: DEBUG nova.compute.utils [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1805.735559] env[63241]: INFO nova.compute.manager [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Took 24.85 seconds to build instance. 
[ 1805.757416] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.931294] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.933763] env[63241]: DEBUG oslo_concurrency.lockutils [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] Acquired lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.933967] env[63241]: DEBUG nova.network.neutron [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Refreshing network info cache for port 24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1805.966910] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='aa42fa30f25bbe4476a972ea18e6c496',container_format='bare',created_at=2024-12-12T15:30:18Z,direct_url=,disk_format='vmdk',id=f4486edd-4cdd-43f5-bb2b-0002ef417a28,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1285185405-shelved',owner='5d1a62ae45c74a7ba071363005b3a52e',properties=ImageMetaProps,protected=,size=31593472,status='active',tags=,updated_at=2024-12-12T15:30:34Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1805.967216] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1805.967386] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1805.967598] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1805.967748] env[63241]: DEBUG 
nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1805.968180] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1805.968505] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1805.968700] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1805.968883] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1805.969070] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1805.969253] env[63241]: DEBUG nova.virt.hardware [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1805.970374] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5a545f-c6a8-47f3-b2fd-bca5fbb6e9b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.979836] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77831b01-1d2c-4585-8d81-fa681917720b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.996124] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:8e:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfae3ef8-cae7-455d-8632-ba93e1671625', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24131a23-55e1-4bd6-8813-5768da05438f', 'vif_model': 'vmxnet3'}] 
{{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1806.004949] env[63241]: DEBUG oslo.service.loopingcall [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1806.005255] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1806.005486] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3753bd7e-c869-490b-a5be-1ed08d1b7acd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.027398] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1806.027398] env[63241]: value = "task-1820985" [ 1806.027398] env[63241]: _type = "Task" [ 1806.027398] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.041169] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820985, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.046586] env[63241]: DEBUG nova.scheduler.client.report [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1806.208806] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.210141] env[63241]: INFO nova.compute.manager [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] instance snapshotting [ 1806.214216] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28e83c9-56fd-40e7-9c43-b74f2856e73e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.235823] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b5f257-1c87-4972-a87f-bea6de1e9e21 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1806.238785] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b533e6d8-945e-48d0-9484-4271c8a3138f tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.367s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.238889] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "49d350ff-4932-4759-a3fa-53274c484ae6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.900s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.239043] env[63241]: INFO nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] During sync_power_state the instance has a pending task (spawning). Skip. [ 1806.239213] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "49d350ff-4932-4759-a3fa-53274c484ae6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.354836] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.355254] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.355484] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.355702] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.355927] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 
tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.358216] env[63241]: INFO nova.compute.manager [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Terminating instance [ 1806.360514] env[63241]: DEBUG nova.compute.manager [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1806.360811] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1806.362052] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7224a887-2a7f-4e56-9b7f-059e714e779e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.371132] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1806.371380] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72737c0b-f050-45d6-ba73-85a2aa9b2587 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.378837] env[63241]: DEBUG oslo_vmware.api [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1806.378837] env[63241]: value = "task-1820986" [ 1806.378837] env[63241]: _type = "Task" [ 1806.378837] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.387499] env[63241]: DEBUG oslo_vmware.api [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.543082] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820985, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.552383] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.868s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.552781] env[63241]: DEBUG nova.compute.manager [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1806.559830] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.806s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.559830] env[63241]: INFO nova.compute.claims [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1806.750799] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1806.751177] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6865465b-9fe3-417b-bb15-0c5235086b39 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.760969] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1806.760969] env[63241]: value = "task-1820987" [ 1806.760969] env[63241]: _type = "Task" [ 1806.760969] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.771787] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820987, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.889813] env[63241]: DEBUG oslo_vmware.api [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820986, 'name': PowerOffVM_Task, 'duration_secs': 0.249431} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.890708] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1806.890959] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1806.891289] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d5c9b3a-6f1a-4c4f-8319-2cbd57bd8bb2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.895366] env[63241]: DEBUG nova.network.neutron [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updated VIF entry in instance network info cache for port 24131a23-55e1-4bd6-8813-5768da05438f. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1806.895709] env[63241]: DEBUG nova.network.neutron [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating instance_info_cache with network_info: [{"id": "24131a23-55e1-4bd6-8813-5768da05438f", "address": "fa:16:3e:fa:8e:d4", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24131a23-55", "ovs_interfaceid": "24131a23-55e1-4bd6-8813-5768da05438f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.025931] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1807.026201] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 
tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1807.026430] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleting the datastore file [datastore1] d1abe122-0259-4f6e-b363-d7c0b1ae7a69 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1807.026718] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab0deb5e-5719-4d83-8198-ef83a51da97c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.037142] env[63241]: DEBUG oslo_vmware.api [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for the task: (returnval){ [ 1807.037142] env[63241]: value = "task-1820990" [ 1807.037142] env[63241]: _type = "Task" [ 1807.037142] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.045294] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1820985, 'name': CreateVM_Task, 'duration_secs': 0.585344} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.045971] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1807.046773] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.046965] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.047417] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1807.048113] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1de6d603-4338-4212-b008-d3d0e0947524 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.053408] env[63241]: DEBUG oslo_vmware.api [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': 
task-1820990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.057483] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1807.057483] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5242aaea-4735-ec48-af43-9a361e6c5794" [ 1807.057483] env[63241]: _type = "Task" [ 1807.057483] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.063340] env[63241]: DEBUG nova.compute.utils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1807.067965] env[63241]: DEBUG nova.compute.manager [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1807.068181] env[63241]: DEBUG nova.network.neutron [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1807.074282] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5242aaea-4735-ec48-af43-9a361e6c5794, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.126074] env[63241]: DEBUG nova.policy [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54dc853b6f204a75ae7612f9fbd2d1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecafb0abbdc74501b22b20b797c4c60c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1807.274171] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820987, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.290424] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.290817] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.291177] env[63241]: INFO nova.compute.manager [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Attaching volume a768a43d-46e3-4b6e-b741-89f8691d110a to /dev/sdb [ 1807.333233] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b97532c-ee83-414e-9c51-d8d876e0e6fa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.342348] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8428cfcc-acf4-4375-b0ca-b2852151ea05 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.361503] env[63241]: DEBUG nova.virt.block_device [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Updating existing volume attachment record: 63f0ec33-8d72-4a52-885a-7e3a6e981b23 {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1807.401543] env[63241]: DEBUG oslo_concurrency.lockutils [req-f29b9eb0-aeda-4292-a27e-94ccdf99296b req-b4d91c19-d901-4284-a88a-7b872eda853c service nova] Releasing lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.547102] env[63241]: DEBUG oslo_vmware.api [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Task: {'id': task-1820990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252577} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.547385] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1807.547572] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1807.547745] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1807.547915] env[63241]: INFO nova.compute.manager [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1807.548240] env[63241]: DEBUG oslo.service.loopingcall [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1807.548514] env[63241]: DEBUG nova.compute.manager [-] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1807.548628] env[63241]: DEBUG nova.network.neutron [-] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1807.567412] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.567906] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Processing image f4486edd-4cdd-43f5-bb2b-0002ef417a28 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1807.567995] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28/f4486edd-4cdd-43f5-bb2b-0002ef417a28.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.568125] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28/f4486edd-4cdd-43f5-bb2b-0002ef417a28.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.568321] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1807.568673] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89c4c3ee-feca-4c1c-9938-11de1b9a1b78 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.578220] env[63241]: DEBUG nova.compute.manager [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1807.582483] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1807.582682] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1807.583840] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4806f408-440f-41a1-a46e-7e1dfa822c10 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.592171] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1807.592171] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a266c0-145c-0fef-c360-00d0aab79a9c" [ 1807.592171] env[63241]: _type = "Task" [ 1807.592171] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.599040] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a266c0-145c-0fef-c360-00d0aab79a9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.756144] env[63241]: DEBUG nova.network.neutron [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Successfully created port: db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1807.774797] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820987, 'name': CreateSnapshot_Task, 'duration_secs': 0.630944} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.775126] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1807.775902] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa51c751-1c8f-4de9-a5bc-a6cb42da5ae8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.884640] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0a0264-a0e0-4ef5-a8bf-fb041d3cda8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.894845] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ac0112-792c-47dc-8628-5388531772ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.928261] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b268d7-2661-44df-89f5-262d24df9758 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.936884] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b86b04e-507d-49a0-8daf-c689ae563ef2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.952651] env[63241]: DEBUG nova.compute.provider_tree [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1808.102277] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Preparing fetch location {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1808.102702] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Fetch image to [datastore1] OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df/OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df.vmdk {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1808.102963] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Downloading stream optimized image f4486edd-4cdd-43f5-bb2b-0002ef417a28 to [datastore1] OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df/OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df.vmdk on the data store datastore1 as vApp 
{{(pid=63241) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1808.103171] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Downloading image file data f4486edd-4cdd-43f5-bb2b-0002ef417a28 to the ESX as VM named 'OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df' {{(pid=63241) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1808.191682] env[63241]: DEBUG nova.compute.manager [req-1d70eeb9-7be8-42df-892d-90a9e4577f0f req-5fdc24eb-b846-489b-b7c0-bfd95f50415f service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Received event network-vif-deleted-7133056f-eb77-48c4-b773-9923e2a62fc6 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1808.191682] env[63241]: INFO nova.compute.manager [req-1d70eeb9-7be8-42df-892d-90a9e4577f0f req-5fdc24eb-b846-489b-b7c0-bfd95f50415f service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Neutron deleted interface 7133056f-eb77-48c4-b773-9923e2a62fc6; detaching it from the instance and deleting it from the info cache [ 1808.191796] env[63241]: DEBUG nova.network.neutron [req-1d70eeb9-7be8-42df-892d-90a9e4577f0f req-5fdc24eb-b846-489b-b7c0-bfd95f50415f service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.198179] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1808.198179] env[63241]: value = "resgroup-9" [ 1808.198179] env[63241]: _type = "ResourcePool" [ 1808.198179] env[63241]: }. {{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1808.198486] env[63241]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-ac21e5c0-9155-497a-89da-b7aac1c9b340 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.222250] env[63241]: DEBUG nova.compute.manager [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Stashing vm_state: active {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1808.232508] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lease: (returnval){ [ 1808.232508] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528e9836-834d-7799-8189-7a5cf5c382ec" [ 1808.232508] env[63241]: _type = "HttpNfcLease" [ 1808.232508] env[63241]: } obtained for vApp import into resource pool (val){ [ 1808.232508] env[63241]: value = "resgroup-9" [ 1808.232508] env[63241]: _type = "ResourcePool" [ 1808.232508] env[63241]: }. 
{{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1808.233120] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the lease: (returnval){ [ 1808.233120] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528e9836-834d-7799-8189-7a5cf5c382ec" [ 1808.233120] env[63241]: _type = "HttpNfcLease" [ 1808.233120] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1808.257852] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1808.257852] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528e9836-834d-7799-8189-7a5cf5c382ec" [ 1808.257852] env[63241]: _type = "HttpNfcLease" [ 1808.257852] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1808.302354] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1808.302749] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-73e6aa74-00e5-4737-aca6-a8a85781a1d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.313105] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1808.313105] env[63241]: value = "task-1820995" [ 1808.313105] env[63241]: _type = "Task" [ 1808.313105] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.323023] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820995, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.327930] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.328222] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.456172] env[63241]: DEBUG nova.scheduler.client.report [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1808.494363] env[63241]: DEBUG nova.network.neutron [-] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.590837] env[63241]: DEBUG nova.compute.manager [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1808.621310] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1808.621562] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1808.622029] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1808.622029] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1808.622175] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1808.622277] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1808.622413] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1808.622570] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1808.622736] 
env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1808.622896] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1808.623085] env[63241]: DEBUG nova.virt.hardware [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1808.624028] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9dd4a54-5b49-467b-9bee-5da5af8f0008 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.633542] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6cd09a-3875-4a8c-b897-1772daf1224b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.694746] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c2dec6c-68bb-42da-81f5-648797950ef0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.706534] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43820215-7ee6-4508-8f5d-b048d0bb6a09 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.745764] env[63241]: DEBUG nova.compute.manager [req-1d70eeb9-7be8-42df-892d-90a9e4577f0f req-5fdc24eb-b846-489b-b7c0-bfd95f50415f service nova] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Detach interface failed, port_id=7133056f-eb77-48c4-b773-9923e2a62fc6, reason: Instance d1abe122-0259-4f6e-b363-d7c0b1ae7a69 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1808.747113] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.755018] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1808.755018] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528e9836-834d-7799-8189-7a5cf5c382ec" [ 1808.755018] env[63241]: _type = "HttpNfcLease" [ 1808.755018] env[63241]: } is initializing. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1808.824419] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820995, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.831140] env[63241]: DEBUG nova.compute.manager [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1808.963143] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.963702] env[63241]: DEBUG nova.compute.manager [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1808.966968] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.076s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.967159] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.967383] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1808.967670] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.031s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.967891] env[63241]: DEBUG nova.objects.instance [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lazy-loading 'resources' on Instance uuid 91b65576-47be-4a92-a6fd-8380532c8e1d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1808.969724] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6499cf-3a6b-4c9e-b5c9-d045a399a0fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.979763] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253c98a6-da6f-42d8-ba26-1ca50dda7ab1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.995560] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29f4e81-5c3b-45cf-aaab-bb0bc0ba434c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.998739] env[63241]: INFO nova.compute.manager [-] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Took 1.45 seconds to deallocate network for instance. [ 1809.006637] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba44003a-3ceb-4877-bea1-d8b7f776fc1e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.037411] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179696MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1809.037720] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.251152] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1809.251385] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377189', 'volume_id': '8530001a-3c79-454f-a061-a6f03dce1921', 'name': 'volume-8530001a-3c79-454f-a061-a6f03dce1921', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1e172f73-972e-4401-b358-512f7e03b27f', 'attached_at': '', 'detached_at': '', 'volume_id': '8530001a-3c79-454f-a061-a6f03dce1921', 'serial': '8530001a-3c79-454f-a061-a6f03dce1921'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1809.252257] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98a51d9-c115-4954-953e-ad283ba7c5de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.261689] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1809.261689] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528e9836-834d-7799-8189-7a5cf5c382ec" [ 1809.261689] env[63241]: _type = "HttpNfcLease" [ 1809.261689] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1809.273860] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1809.273860] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]528e9836-834d-7799-8189-7a5cf5c382ec" [ 1809.273860] env[63241]: _type = "HttpNfcLease" [ 1809.273860] env[63241]: }. 
{{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1809.274816] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd47c3ea-d68c-4b99-888f-0f4e4c3bbfb2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.277871] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd498a7-7010-45d8-8f77-07c80ee9ab30 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.305218] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] volume-8530001a-3c79-454f-a061-a6f03dce1921/volume-8530001a-3c79-454f-a061-a6f03dce1921.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1809.308680] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f6a05c9-bbeb-4d62-879e-eeaa9f7bd218 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.321911] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ae9e1-a207-98d9-2156-a9e064cfc500/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1809.322111] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Creating HTTP connection to write to file with size = 31593472 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ae9e1-a207-98d9-2156-a9e064cfc500/disk-0.vmdk. {{(pid=63241) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1809.395079] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820995, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.395386] env[63241]: DEBUG oslo_vmware.api [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1809.395386] env[63241]: value = "task-1820996" [ 1809.395386] env[63241]: _type = "Task" [ 1809.395386] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.405470] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-771599be-2ab2-4a64-af93-53e3129ab242 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.413357] env[63241]: DEBUG oslo_vmware.api [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.414433] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.419398] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "037f539f-1bf1-4897-81b3-08c377b92211" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.421027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.471369] env[63241]: DEBUG nova.compute.utils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1809.475603] env[63241]: DEBUG nova.compute.manager [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1809.475603] env[63241]: DEBUG nova.network.neutron [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1809.506925] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.580184] env[63241]: DEBUG nova.policy [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78657a2bc34d4bb9922678ed287530f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18094134f49b4e84b83e97631bc22903', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1809.822126] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543c4087-1bef-460f-93fc-0f3c2ae67378 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.833703] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d1e4da-22bf-454a-8776-eef683a527b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.841417] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820995, 'name': CloneVM_Task} progress is 95%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.886038] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c671376a-5126-4dce-a6aa-134ee6cc9f2a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.895812] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9162dd-ac53-4f00-8b32-c23d95efa24a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.912322] env[63241]: DEBUG oslo_vmware.api [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820996, 'name': ReconfigVM_Task, 'duration_secs': 0.473061} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.923592] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfigured VM instance instance-0000004e to attach disk [datastore1] volume-8530001a-3c79-454f-a061-a6f03dce1921/volume-8530001a-3c79-454f-a061-a6f03dce1921.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1809.933845] env[63241]: DEBUG nova.compute.utils [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1809.933845] env[63241]: DEBUG nova.compute.provider_tree [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.937322] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66629cea-771c-4cd7-a939-2fae6d76558c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.951516] env[63241]: DEBUG nova.network.neutron [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Successfully updated port: db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1809.961717] env[63241]: DEBUG oslo_vmware.api [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1809.961717] env[63241]: value = "task-1820998" [ 1809.961717] env[63241]: _type = "Task" [ 1809.961717] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.975952] env[63241]: DEBUG oslo_vmware.api [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820998, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.983502] env[63241]: DEBUG nova.compute.manager [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1810.118364] env[63241]: DEBUG nova.network.neutron [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Successfully created port: 93ca840a-01ca-4805-8371-2a7fee63b9ee {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1810.221128] env[63241]: DEBUG nova.compute.manager [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received event network-vif-plugged-db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1810.221389] env[63241]: DEBUG oslo_concurrency.lockutils [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] Acquiring lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.221662] env[63241]: DEBUG oslo_concurrency.lockutils [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] Lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.221849] env[63241]: DEBUG oslo_concurrency.lockutils [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] Lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.222178] env[63241]: DEBUG nova.compute.manager [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] No waiting events found dispatching network-vif-plugged-db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1810.222442] env[63241]: WARNING nova.compute.manager [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received unexpected event network-vif-plugged-db56e1be-f5b4-4531-8573-93fe90bc8b34 for instance with vm_state building and task_state spawning. [ 1810.222613] env[63241]: DEBUG nova.compute.manager [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received event network-changed-db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1810.222731] env[63241]: DEBUG nova.compute.manager [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing instance network info cache due to event network-changed-db56e1be-f5b4-4531-8573-93fe90bc8b34. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1810.222962] env[63241]: DEBUG oslo_concurrency.lockutils [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] Acquiring lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.223221] env[63241]: DEBUG oslo_concurrency.lockutils [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] Acquired lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.223348] env[63241]: DEBUG nova.network.neutron [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing network info cache for port db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1810.350081] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820995, 'name': CloneVM_Task, 'duration_secs': 1.858821} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.353241] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Created linked-clone VM from snapshot [ 1810.353466] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51f6274-6062-4ad8-9f36-7fcbce296a39 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.367070] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Uploading image 7a3cb4cc-3489-405a-861b-c0d09dbeb789 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1810.388920] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1810.389237] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b9290aca-52f8-4fce-9a2c-689d1cb08d8f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.397830] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1810.397830] env[63241]: value = "task-1820999" [ 1810.397830] env[63241]: _type = "Task" [ 1810.397830] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.407044] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820999, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.451023] env[63241]: DEBUG nova.scheduler.client.report [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1810.458443] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.039s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.459588] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.475722] env[63241]: DEBUG oslo_vmware.api [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1820998, 'name': ReconfigVM_Task, 'duration_secs': 0.180179} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.477670] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377189', 'volume_id': '8530001a-3c79-454f-a061-a6f03dce1921', 'name': 'volume-8530001a-3c79-454f-a061-a6f03dce1921', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1e172f73-972e-4401-b358-512f7e03b27f', 'attached_at': '', 'detached_at': '', 'volume_id': '8530001a-3c79-454f-a061-a6f03dce1921', 'serial': '8530001a-3c79-454f-a061-a6f03dce1921'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1810.583037] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Completed reading data from the image iterator. {{(pid=63241) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1810.583037] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ae9e1-a207-98d9-2156-a9e064cfc500/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1810.583037] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1825932-6d78-44c2-b501-5e883e337834 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.591038] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ae9e1-a207-98d9-2156-a9e064cfc500/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1810.591419] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ae9e1-a207-98d9-2156-a9e064cfc500/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1810.591779] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-843e1181-6bd9-4c07-b976-48d38d71890b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.762190] env[63241]: DEBUG nova.network.neutron [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1810.871159] env[63241]: DEBUG nova.network.neutron [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.908276] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820999, 'name': Destroy_Task} progress is 33%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.961512] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.964748] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.207s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.964748] env[63241]: DEBUG nova.objects.instance [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'pci_requests' on Instance uuid 01af6dc5-e0e7-4f8b-ad07-73af80c32577 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1810.988768] env[63241]: INFO nova.scheduler.client.report [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Deleted allocations for instance 91b65576-47be-4a92-a6fd-8380532c8e1d [ 1811.001263] env[63241]: DEBUG nova.compute.manager [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1811.034600] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1811.035302] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1811.035302] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1811.035302] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1811.035503] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1811.035698] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1811.035964] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1811.036263] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1811.036557] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1811.036751] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1811.036974] env[63241]: DEBUG nova.virt.hardware [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1811.038364] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9aeca4-73a8-4b21-bfe1-cbac78a98946 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.051735] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2460f94-f1d1-46c1-a6d5-26bf107ef77f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.126805] env[63241]: DEBUG oslo_vmware.rw_handles [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522ae9e1-a207-98d9-2156-a9e064cfc500/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1811.127114] env[63241]: INFO nova.virt.vmwareapi.images [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Downloaded image file data f4486edd-4cdd-43f5-bb2b-0002ef417a28 [ 1811.128059] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367b13d6-8272-4a36-81c4-eba33e9785db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.147022] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b95e2d7-b47c-4e33-ad23-3fb5aff0596e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.180902] env[63241]: INFO nova.virt.vmwareapi.images [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] The imported VM was unregistered [ 1811.181067] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Caching image {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1811.181265] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1811.181558] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3cd8ec1-8730-4410-a687-b89fc3dac201 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.200187] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Created directory with path [datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1811.200400] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df/OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df.vmdk to [datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28/f4486edd-4cdd-43f5-bb2b-0002ef417a28.vmdk. {{(pid=63241) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1811.200665] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-aa9b030c-0e4f-4c08-9406-1e31ef3a004d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.211894] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1811.211894] env[63241]: value = "task-1821001" [ 1811.211894] env[63241]: _type = "Task" [ 1811.211894] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.221161] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821001, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.374073] env[63241]: DEBUG oslo_concurrency.lockutils [req-3c2b4746-94ac-4157-8d1b-4f0d23054fff req-5aa16028-b5f5-4d14-a8b6-6350a6b897fa service nova] Releasing lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.374941] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.374941] env[63241]: DEBUG nova.network.neutron [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1811.409580] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1820999, 'name': Destroy_Task, 'duration_secs': 0.667771} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.409911] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Destroyed the VM [ 1811.410169] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1811.410717] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-aaf11ac3-79ee-4c9e-8910-0bacee1b13dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.417617] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1811.417617] env[63241]: value = "task-1821002" [ 1811.417617] env[63241]: _type = "Task" [ 1811.417617] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.426373] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821002, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.468076] env[63241]: DEBUG nova.objects.instance [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'numa_topology' on Instance uuid 01af6dc5-e0e7-4f8b-ad07-73af80c32577 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1811.491507] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "037f539f-1bf1-4897-81b3-08c377b92211" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.491777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.492029] env[63241]: INFO nova.compute.manager [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Attaching volume 5ab7cf9d-57ae-427f-a604-75e49549ee99 to /dev/sdb [ 1811.498914] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e22563f8-0451-451e-9428-4c7c787cd050 tempest-ServerMetadataNegativeTestJSON-1979722159 tempest-ServerMetadataNegativeTestJSON-1979722159-project-member] Lock "91b65576-47be-4a92-a6fd-8380532c8e1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.774s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.528862] env[63241]: DEBUG nova.objects.instance [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lazy-loading 'flavor' on Instance uuid 1e172f73-972e-4401-b358-512f7e03b27f {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1811.551233] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea3475d-1a49-480a-a8dc-dee07d443b25 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.561073] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97b7185-1dde-47d9-84fc-5d6145fa612f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.580741] env[63241]: DEBUG nova.virt.block_device [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Updating existing volume attachment record: 435cc2bd-5b05-4fad-abfb-4daf73d6c6b0 {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1811.726213] env[63241]: DEBUG oslo_vmware.api [None 
req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821001, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.820575] env[63241]: DEBUG nova.network.neutron [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Successfully updated port: 93ca840a-01ca-4805-8371-2a7fee63b9ee {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1811.908638] env[63241]: DEBUG nova.network.neutron [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1811.924079] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Volume attach. Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1811.924369] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377193', 'volume_id': 'a768a43d-46e3-4b6e-b741-89f8691d110a', 'name': 'volume-a768a43d-46e3-4b6e-b741-89f8691d110a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e62f49f0-370d-4b5d-ab43-72e0e6238432', 'attached_at': '', 'detached_at': '', 'volume_id': 'a768a43d-46e3-4b6e-b741-89f8691d110a', 'serial': 'a768a43d-46e3-4b6e-b741-89f8691d110a'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1811.925383] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa21b5b-53a5-49dd-8c6c-6b36d8d91523 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.952366] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821002, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.964678] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1380b3-e6c4-4fdb-a44b-334d1431266f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.992612] env[63241]: INFO nova.compute.claims [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1812.003061] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] volume-a768a43d-46e3-4b6e-b741-89f8691d110a/volume-a768a43d-46e3-4b6e-b741-89f8691d110a.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1812.003961] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2701d67-5f5f-475a-99b3-7f33f2004e3d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.027995] env[63241]: DEBUG oslo_vmware.api [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1812.027995] env[63241]: value = "task-1821006" [ 1812.027995] env[63241]: _type = "Task" [ 1812.027995] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.035401] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8bf38ae8-f4b1-443f-b532-59549bd61f31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.907s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.041986] env[63241]: DEBUG oslo_vmware.api [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821006, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.068957] env[63241]: INFO nova.compute.manager [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Rescuing [ 1812.068957] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.068957] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.069237] env[63241]: DEBUG nova.network.neutron [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1812.145852] env[63241]: DEBUG nova.network.neutron [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updating instance_info_cache with network_info: [{"id": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "address": "fa:16:3e:9f:6a:40", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb56e1be-f5", "ovs_interfaceid": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.227296] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821001, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.337972] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "refresh_cache-6055a56d-1e0d-47bc-930b-b62206a0263e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.337972] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "refresh_cache-6055a56d-1e0d-47bc-930b-b62206a0263e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.337972] env[63241]: DEBUG nova.network.neutron [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1812.381699] env[63241]: DEBUG nova.compute.manager [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Received event network-vif-plugged-93ca840a-01ca-4805-8371-2a7fee63b9ee {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1812.382160] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] Acquiring lock "6055a56d-1e0d-47bc-930b-b62206a0263e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.382160] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.382609] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.382856] env[63241]: DEBUG nova.compute.manager [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] No waiting events found dispatching network-vif-plugged-93ca840a-01ca-4805-8371-2a7fee63b9ee {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1812.383106] env[63241]: WARNING nova.compute.manager [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Received unexpected event network-vif-plugged-93ca840a-01ca-4805-8371-2a7fee63b9ee for instance with vm_state building and task_state spawning. 
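The entries above show nova-compute receiving Neutron external events (network-vif-plugged / network-changed) for an instance that is still building: the per-instance "<uuid>-events" lock is taken, the waiter table is checked, and because nothing is waiting the event is logged as unexpected before the instance network info cache is refreshed. The snippet below is a minimal, self-contained sketch of that dispatch pattern only; the class and function names are illustrative stand-ins, not Nova's actual implementation.

```python
# Simplified sketch of the event-dispatch pattern visible in the log above:
# an incoming "network-vif-plugged-<port_id>" notification either wakes a
# registered waiter or is reported as unexpected. Illustrative names only.
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEvents:
    """Tracks waiters for per-instance external events, guarded by a lock."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest in an event before starting the action that causes it."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Remove and return the waiter for an event, or None if nobody is waiting."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


def handle_external_event(events, instance_uuid, event_name):
    """Dispatch one incoming event, mirroring the 'No waiting events found' branch."""
    waiter = events.pop_event(instance_uuid, event_name)
    if waiter is None:
        LOG.warning("Received unexpected event %s for instance %s",
                    event_name, instance_uuid)
    else:
        waiter.set()                    # unblock whoever is blocked on waiter.wait()


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    events = InstanceEvents()
    uuid = "6055a56d-1e0d-47bc-930b-b62206a0263e"
    # No waiter registered yet -> the "unexpected event" warning seen in the log.
    handle_external_event(events, uuid, "network-vif-plugged-93ca840a")
    # With a waiter registered, the same event is consumed and the waiter is woken.
    w = events.prepare_for_event(uuid, "network-vif-plugged-93ca840a")
    handle_external_event(events, uuid, "network-vif-plugged-93ca840a")
    assert w.is_set()
```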
[ 1812.383951] env[63241]: DEBUG nova.compute.manager [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Received event network-changed-93ca840a-01ca-4805-8371-2a7fee63b9ee {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1812.383951] env[63241]: DEBUG nova.compute.manager [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Refreshing instance network info cache due to event network-changed-93ca840a-01ca-4805-8371-2a7fee63b9ee. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1812.383951] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] Acquiring lock "refresh_cache-6055a56d-1e0d-47bc-930b-b62206a0263e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.435638] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821002, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.548105] env[63241]: DEBUG oslo_vmware.api [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821006, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.648409] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.648877] env[63241]: DEBUG nova.compute.manager [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Instance network_info: |[{"id": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "address": "fa:16:3e:9f:6a:40", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb56e1be-f5", "ovs_interfaceid": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1812.649323] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:6a:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db56e1be-f5b4-4531-8573-93fe90bc8b34', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1812.657967] env[63241]: DEBUG oslo.service.loopingcall [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1812.660929] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1812.662363] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b873401d-fef0-4efb-840f-ca38db09ebe0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.689677] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1812.689677] env[63241]: value = "task-1821007" [ 1812.689677] env[63241]: _type = "Task" [ 1812.689677] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.703249] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821007, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.727876] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821001, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.923856] env[63241]: DEBUG nova.network.neutron [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1812.933443] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821002, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.045363] env[63241]: DEBUG oslo_vmware.api [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821006, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.127171] env[63241]: DEBUG nova.network.neutron [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating instance_info_cache with network_info: [{"id": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "address": "fa:16:3e:17:1d:18", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbeb829e-4c", "ovs_interfaceid": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.206623] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821007, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.229275] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821001, 'name': MoveVirtualDisk_Task} progress is 52%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.346261] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ca68bc-7599-489c-8b1c-faea621b2f3d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.357188] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cedd491-871a-428b-a4ea-d4ea290a8140 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.396199] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd9483b-97c7-4504-b67c-7704bc206836 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.408892] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ea31e8-dbcf-4fa3-a143-2e0b7e261f90 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.433025] env[63241]: DEBUG nova.compute.provider_tree [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1813.443455] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821002, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.542933] env[63241]: DEBUG oslo_vmware.api [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821006, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.564018] env[63241]: DEBUG nova.network.neutron [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Updating instance_info_cache with network_info: [{"id": "93ca840a-01ca-4805-8371-2a7fee63b9ee", "address": "fa:16:3e:90:2d:b7", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93ca840a-01", "ovs_interfaceid": "93ca840a-01ca-4805-8371-2a7fee63b9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.630564] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.703481] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821007, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.725581] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821001, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.932777] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821002, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.937059] env[63241]: DEBUG nova.scheduler.client.report [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1814.042464] env[63241]: DEBUG oslo_vmware.api [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821006, 'name': ReconfigVM_Task, 'duration_secs': 1.531788} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.042778] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Reconfigured VM instance instance-0000005b to attach disk [datastore1] volume-a768a43d-46e3-4b6e-b741-89f8691d110a/volume-a768a43d-46e3-4b6e-b741-89f8691d110a.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1814.048963] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95cb6a84-3332-4d92-9885-f3b53a35fc47 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.066551] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "refresh_cache-6055a56d-1e0d-47bc-930b-b62206a0263e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.067018] env[63241]: DEBUG nova.compute.manager [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Instance network_info: |[{"id": "93ca840a-01ca-4805-8371-2a7fee63b9ee", "address": "fa:16:3e:90:2d:b7", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": 
"nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93ca840a-01", "ovs_interfaceid": "93ca840a-01ca-4805-8371-2a7fee63b9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1814.068783] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] Acquired lock "refresh_cache-6055a56d-1e0d-47bc-930b-b62206a0263e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.069060] env[63241]: DEBUG nova.network.neutron [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Refreshing network info cache for port 93ca840a-01ca-4805-8371-2a7fee63b9ee {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1814.070420] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:2d:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '184687d6-125a-4b58-bb5b-fdb404088eda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93ca840a-01ca-4805-8371-2a7fee63b9ee', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1814.078226] env[63241]: DEBUG oslo.service.loopingcall [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1814.078632] env[63241]: DEBUG oslo_vmware.api [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1814.078632] env[63241]: value = "task-1821009" [ 1814.078632] env[63241]: _type = "Task" [ 1814.078632] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.079224] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1814.079610] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ffcc67dc-78c7-4ff6-8193-af69b909f252 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.109373] env[63241]: DEBUG oslo_vmware.api [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821009, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.110504] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1814.110504] env[63241]: value = "task-1821010" [ 1814.110504] env[63241]: _type = "Task" [ 1814.110504] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.120212] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821010, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.165254] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1814.165516] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-422aa15b-6301-4566-8fa2-2332b1768bce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.174230] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1814.174230] env[63241]: value = "task-1821011" [ 1814.174230] env[63241]: _type = "Task" [ 1814.174230] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.185465] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821011, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.202492] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821007, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.225912] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821001, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.431111] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821002, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.441952] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.478s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.444241] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.697s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.477858] env[63241]: INFO nova.network.neutron [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating port 7a0be842-edfe-48ff-9275-dbb260c7e781 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1814.594965] env[63241]: DEBUG oslo_vmware.api [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821009, 'name': ReconfigVM_Task, 'duration_secs': 0.207556} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.595411] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377193', 'volume_id': 'a768a43d-46e3-4b6e-b741-89f8691d110a', 'name': 'volume-a768a43d-46e3-4b6e-b741-89f8691d110a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e62f49f0-370d-4b5d-ab43-72e0e6238432', 'attached_at': '', 'detached_at': '', 'volume_id': 'a768a43d-46e3-4b6e-b741-89f8691d110a', 'serial': 'a768a43d-46e3-4b6e-b741-89f8691d110a'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1814.624920] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821010, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.684737] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821011, 'name': PowerOffVM_Task, 'duration_secs': 0.252464} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.684952] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1814.685824] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed6c242-9445-4c13-88c2-f733a9ac92be {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.710706] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eed3af5-410f-447a-aa36-c57adddef107 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.726224] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821007, 'name': CreateVM_Task, 'duration_secs': 1.940128} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.726769] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1814.727777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.727777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.728349] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1814.728483] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3ea26b1-fce0-4bb6-b5a0-07d3427c0f80 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.734666] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821001, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.357244} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.734666] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df/OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df.vmdk to [datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28/f4486edd-4cdd-43f5-bb2b-0002ef417a28.vmdk. [ 1814.734666] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Cleaning up location [datastore1] OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1814.734666] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_8f97ff2f-241a-4404-80c8-a5a5572120df {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1814.734950] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72788e63-16ef-4be1-9d23-1134295f94be {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.737768] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1814.737768] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527d6718-dc5b-d921-8e7b-ae4f883e41b0" [ 1814.737768] env[63241]: _type = "Task" [ 1814.737768] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.749042] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1814.749042] env[63241]: value = "task-1821012" [ 1814.749042] env[63241]: _type = "Task" [ 1814.749042] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.755138] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1814.755138] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c368b272-d6d2-4d41-a359-4f06bd861023 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.763297] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527d6718-dc5b-d921-8e7b-ae4f883e41b0, 'name': SearchDatastore_Task, 'duration_secs': 0.022048} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.767779] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.768051] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1814.768302] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.768456] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.768663] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1814.768946] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821012, 'name': DeleteDatastoreFile_Task} progress is 
0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.769283] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1814.769283] env[63241]: value = "task-1821013" [ 1814.769283] env[63241]: _type = "Task" [ 1814.769283] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.769472] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-067be48e-6628-44ef-953a-9342b646c8a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.782490] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1814.782706] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1814.782941] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.785137] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1814.785137] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1814.787725] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfb4f344-b41b-4dea-ab6c-c353ec3c10bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.791853] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1814.791853] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e9106d-8ebb-59e3-964b-864fa9387e39" [ 1814.791853] env[63241]: _type = "Task" [ 1814.791853] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.801248] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e9106d-8ebb-59e3-964b-864fa9387e39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.932795] env[63241]: DEBUG oslo_vmware.api [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821002, 'name': RemoveSnapshot_Task, 'duration_secs': 3.342376} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.933089] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1814.949389] env[63241]: INFO nova.compute.claims [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1814.972591] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "e28ba013-0bc5-4edc-858d-674980bc8742" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.972885] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.973082] env[63241]: DEBUG nova.compute.manager [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1814.974237] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc0f1a0-21de-462b-aa19-30fe0d2360a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.983199] env[63241]: DEBUG nova.compute.manager [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 
1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1814.984184] env[63241]: DEBUG nova.objects.instance [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'flavor' on Instance uuid e28ba013-0bc5-4edc-858d-674980bc8742 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1815.121784] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821010, 'name': CreateVM_Task, 'duration_secs': 0.848992} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.121983] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1815.122651] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.122733] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.125421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1815.125421] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11296200-c92c-4c41-97c7-12c7fbbfced1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.129158] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1815.129158] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5270c3ac-bb69-e2d8-a416-9d39ea724a65" [ 1815.129158] env[63241]: _type = "Task" [ 1815.129158] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.137931] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5270c3ac-bb69-e2d8-a416-9d39ea724a65, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.140377] env[63241]: DEBUG nova.network.neutron [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Updated VIF entry in instance network info cache for port 93ca840a-01ca-4805-8371-2a7fee63b9ee. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1815.140699] env[63241]: DEBUG nova.network.neutron [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Updating instance_info_cache with network_info: [{"id": "93ca840a-01ca-4805-8371-2a7fee63b9ee", "address": "fa:16:3e:90:2d:b7", "network": {"id": "67451ff6-4999-4fda-bda2-d7fb1e9a45d2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1624699611-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18094134f49b4e84b83e97631bc22903", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "184687d6-125a-4b58-bb5b-fdb404088eda", "external-id": "nsx-vlan-transportzone-134", "segmentation_id": 134, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93ca840a-01", "ovs_interfaceid": "93ca840a-01ca-4805-8371-2a7fee63b9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.263941] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.060463} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.263941] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1815.263941] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28/f4486edd-4cdd-43f5-bb2b-0002ef417a28.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.264197] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28/f4486edd-4cdd-43f5-bb2b-0002ef417a28.vmdk to [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951/0e4a3b3a-4464-404f-9154-1ab6f97ae951.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1815.264303] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3d2bfd8-338d-4a1c-9eee-e07980487391 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.278025] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1815.278025] env[63241]: value = "task-1821014" [ 1815.278025] env[63241]: _type = "Task" [ 1815.278025] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.285217] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.304441] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e9106d-8ebb-59e3-964b-864fa9387e39, 'name': SearchDatastore_Task, 'duration_secs': 0.020091} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.305379] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72f967df-85ab-4ce2-990d-83e04dc2807b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.313606] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1815.313606] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523ab308-508b-88fd-2781-af6fe1f33ad5" [ 1815.313606] env[63241]: _type = "Task" [ 1815.313606] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.324765] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523ab308-508b-88fd-2781-af6fe1f33ad5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.438154] env[63241]: WARNING nova.compute.manager [None req-b4a2f0c9-1160-42f0-9587-d6ff715d12d2 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Image not found during snapshot: nova.exception.ImageNotFound: Image 7a3cb4cc-3489-405a-861b-c0d09dbeb789 could not be found. [ 1815.455996] env[63241]: INFO nova.compute.resource_tracker [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating resource usage from migration 9b0fd3a6-0f57-4279-b77c-70066c90abbc [ 1815.489624] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1815.489915] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63d92671-50a5-481e-95ff-9f8fa10b8e9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.504109] env[63241]: DEBUG oslo_vmware.api [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1815.504109] env[63241]: value = "task-1821015" [ 1815.504109] env[63241]: _type = "Task" [ 1815.504109] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.517748] env[63241]: DEBUG oslo_vmware.api [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821015, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.641811] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5270c3ac-bb69-e2d8-a416-9d39ea724a65, 'name': SearchDatastore_Task, 'duration_secs': 0.010474} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.642926] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.642926] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1815.642926] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.643440] env[63241]: DEBUG nova.objects.instance [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'flavor' on Instance uuid e62f49f0-370d-4b5d-ab43-72e0e6238432 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1815.645027] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3d9d4e2-1615-4f11-9369-cfc79d4676f9 req-70f15282-07e1-43be-aa24-946744d4253e service nova] Releasing lock "refresh_cache-6055a56d-1e0d-47bc-930b-b62206a0263e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.737248] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7eaabb0-698e-4f23-8d81-4966848bb175 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.747581] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f04b27-3337-45dc-9b90-a651866e2d1d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.786066] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b668404-8dc4-47f6-b291-6d27dcccc085 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.795563] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821014, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.800098] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a65fc85-e1dc-45a6-a533-8bcb76752abd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.819711] env[63241]: DEBUG nova.compute.provider_tree [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1815.834584] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523ab308-508b-88fd-2781-af6fe1f33ad5, 'name': SearchDatastore_Task, 'duration_secs': 0.01059} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.834934] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.835245] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] d7d5b5a1-bfe9-43a1-b8f1-0a0048562530/d7d5b5a1-bfe9-43a1-b8f1-0a0048562530.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1815.835597] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.835801] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1815.836079] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-410d10c9-dd5c-479a-a88e-e087b18809b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.839198] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6601c1c9-7dc0-4ea4-a3d7-5fd48bf0eee8 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.849151] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1815.849151] env[63241]: value = "task-1821016" [ 1815.849151] env[63241]: _type = "Task" [ 1815.849151] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.857133] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1815.857345] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1815.861185] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abda115c-2314-47f9-a504-ece4d9cd5703 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.864753] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821016, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.871048] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1815.871048] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f66984-b698-0309-4cd8-23c047ba7e46" [ 1815.871048] env[63241]: _type = "Task" [ 1815.871048] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.882069] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f66984-b698-0309-4cd8-23c047ba7e46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.017478] env[63241]: DEBUG oslo_vmware.api [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821015, 'name': PowerOffVM_Task, 'duration_secs': 0.353524} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.018033] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1816.018234] env[63241]: DEBUG nova.compute.manager [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1816.019162] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4e8afc-d7e9-4c55-9163-693d7759e3f6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.127271] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Volume attach. Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1816.127537] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377197', 'volume_id': '5ab7cf9d-57ae-427f-a604-75e49549ee99', 'name': 'volume-5ab7cf9d-57ae-427f-a604-75e49549ee99', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '037f539f-1bf1-4897-81b3-08c377b92211', 'attached_at': '', 'detached_at': '', 'volume_id': '5ab7cf9d-57ae-427f-a604-75e49549ee99', 'serial': '5ab7cf9d-57ae-427f-a604-75e49549ee99'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1816.128873] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e565de-c4a1-41ca-8091-2fe4b6e6f805 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.155285] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ac0e2d3-96ce-4d1d-9b72-4a5e02599216 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.865s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.156755] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c800d3-2a41-4a31-92dc-98cf35710f2e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.185957] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 
037f539f-1bf1-4897-81b3-08c377b92211] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] volume-5ab7cf9d-57ae-427f-a604-75e49549ee99/volume-5ab7cf9d-57ae-427f-a604-75e49549ee99.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1816.187031] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0088b211-6fb2-40d3-b9bf-1d81ce0cebe1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.208293] env[63241]: DEBUG oslo_vmware.api [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1816.208293] env[63241]: value = "task-1821017" [ 1816.208293] env[63241]: _type = "Task" [ 1816.208293] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.221018] env[63241]: DEBUG oslo_vmware.api [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821017, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.296806] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821014, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.326228] env[63241]: DEBUG nova.scheduler.client.report [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1816.363772] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821016, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.391020] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f66984-b698-0309-4cd8-23c047ba7e46, 'name': SearchDatastore_Task, 'duration_secs': 0.086302} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.391020] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7184130e-2db4-4f50-8817-659f0e6a903e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.399480] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1816.399480] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52894aab-cb90-6658-680a-7eeef954c653" [ 1816.399480] env[63241]: _type = "Task" [ 1816.399480] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.423152] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52894aab-cb90-6658-680a-7eeef954c653, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.518966] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "12b99b2b-56f0-4ce9-8897-f429c2084f38" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.519345] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.519490] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "12b99b2b-56f0-4ce9-8897-f429c2084f38-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.519888] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.520034] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.522698] env[63241]: INFO nova.compute.manager [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Terminating instance [ 1816.526020] env[63241]: DEBUG nova.compute.manager [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1816.526317] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1816.527828] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68e7bcc-382b-4c2b-aa9c-baa439e12ffc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.538211] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1c6787b4-87f1-4ae4-9497-a4808fa6a2ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.565s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.542367] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1816.542673] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18fa1988-f924-455d-b377-586a0db8a6d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.556172] env[63241]: DEBUG oslo_vmware.api [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1816.556172] env[63241]: value = "task-1821018" [ 1816.556172] env[63241]: _type = "Task" [ 1816.556172] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.571857] env[63241]: DEBUG oslo_vmware.api [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821018, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.723377] env[63241]: DEBUG oslo_vmware.api [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821017, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.795799] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821014, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.832610] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.388s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.832773] env[63241]: INFO nova.compute.manager [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Migrating [ 1816.846499] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.809s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.875773] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821016, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.915933] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52894aab-cb90-6658-680a-7eeef954c653, 'name': SearchDatastore_Task, 'duration_secs': 0.090361} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.917597] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.918121] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 1e172f73-972e-4401-b358-512f7e03b27f/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. {{(pid=63241) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1816.919634] env[63241]: DEBUG nova.compute.manager [req-4dcbaddc-7aab-4b3a-9e96-04aedc66ab72 req-a020d9f4-f166-431b-9c1d-1b8a7bd4178e service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received event network-vif-plugged-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1816.919961] env[63241]: DEBUG oslo_concurrency.lockutils [req-4dcbaddc-7aab-4b3a-9e96-04aedc66ab72 req-a020d9f4-f166-431b-9c1d-1b8a7bd4178e service nova] Acquiring lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.920283] env[63241]: DEBUG oslo_concurrency.lockutils [req-4dcbaddc-7aab-4b3a-9e96-04aedc66ab72 req-a020d9f4-f166-431b-9c1d-1b8a7bd4178e service nova] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.920458] env[63241]: DEBUG oslo_concurrency.lockutils [req-4dcbaddc-7aab-4b3a-9e96-04aedc66ab72 req-a020d9f4-f166-431b-9c1d-1b8a7bd4178e service nova] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.920634] env[63241]: DEBUG nova.compute.manager [req-4dcbaddc-7aab-4b3a-9e96-04aedc66ab72 req-a020d9f4-f166-431b-9c1d-1b8a7bd4178e service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] No waiting events found dispatching network-vif-plugged-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1816.920899] env[63241]: WARNING nova.compute.manager [req-4dcbaddc-7aab-4b3a-9e96-04aedc66ab72 req-a020d9f4-f166-431b-9c1d-1b8a7bd4178e service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received unexpected event network-vif-plugged-7a0be842-edfe-48ff-9275-dbb260c7e781 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1816.921296] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.921518] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1816.922105] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74bf0a19-f237-4866-a073-c8afa0fd134a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.927353] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44ba046f-5297-4365-b2b3-7d24b1852b0f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.941705] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1816.941705] env[63241]: value = "task-1821019" [ 1816.941705] env[63241]: _type = "Task" [ 1816.941705] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.948554] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1816.948812] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1816.949677] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be5b8295-1c31-4af7-be4d-a8360bdc7bc3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.957398] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.961826] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1816.961826] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527a397b-be3b-e0d1-4aeb-71c5d15f2cf1" [ 1816.961826] env[63241]: _type = "Task" [ 1816.961826] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.975930] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527a397b-be3b-e0d1-4aeb-71c5d15f2cf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.069581] env[63241]: DEBUG oslo_vmware.api [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821018, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.108030] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.108270] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.108467] env[63241]: DEBUG nova.network.neutron [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1817.221162] env[63241]: DEBUG oslo_vmware.api [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821017, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.295601] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821014, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.364409] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.364632] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.364890] env[63241]: DEBUG nova.network.neutron [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1817.378430] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821016, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.455135] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.476894] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527a397b-be3b-e0d1-4aeb-71c5d15f2cf1, 'name': SearchDatastore_Task, 'duration_secs': 0.094902} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.479153] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26073169-8ba8-4c95-8654-2b88f53ff3bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.485509] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1817.485509] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52cd386a-9e9f-b95d-7b63-e7711405dbaf" [ 1817.485509] env[63241]: _type = "Task" [ 1817.485509] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.496436] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52cd386a-9e9f-b95d-7b63-e7711405dbaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.567907] env[63241]: DEBUG oslo_vmware.api [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821018, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.709180] env[63241]: DEBUG nova.objects.instance [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'flavor' on Instance uuid e28ba013-0bc5-4edc-858d-674980bc8742 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1817.727816] env[63241]: DEBUG oslo_vmware.api [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821017, 'name': ReconfigVM_Task, 'duration_secs': 1.415785} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.728863] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Reconfigured VM instance instance-0000005d to attach disk [datastore1] volume-5ab7cf9d-57ae-427f-a604-75e49549ee99/volume-5ab7cf9d-57ae-427f-a604-75e49549ee99.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1817.744244] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56b5c19a-dab2-4ec4-9de3-bc550942568e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.787893] env[63241]: DEBUG oslo_vmware.api [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1817.787893] env[63241]: value = "task-1821020" [ 1817.787893] env[63241]: _type = "Task" [ 1817.787893] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.818832] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821014, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.440487} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.826037] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f4486edd-4cdd-43f5-bb2b-0002ef417a28/f4486edd-4cdd-43f5-bb2b-0002ef417a28.vmdk to [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951/0e4a3b3a-4464-404f-9154-1ab6f97ae951.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1817.827374] env[63241]: DEBUG oslo_vmware.api [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821020, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.828190] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9afac95-1f05-4c7c-8a5e-3b481daed6e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.861768] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951/0e4a3b3a-4464-404f-9154-1ab6f97ae951.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1817.864819] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d02b098-6935-41bc-a25d-87dc2cc32ee9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.881777] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Applying migration context for instance 49d350ff-4932-4759-a3fa-53274c484ae6 as it has an incoming, in-progress migration 9b0fd3a6-0f57-4279-b77c-70066c90abbc. Migration status is pre-migrating {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1817.884348] env[63241]: INFO nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating resource usage from migration 9b0fd3a6-0f57-4279-b77c-70066c90abbc [ 1817.900223] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821016, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.902295] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1817.902295] env[63241]: value = "task-1821021" [ 1817.902295] env[63241]: _type = "Task" [ 1817.902295] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.917305] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821021, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.926131] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 94a604da-ad3d-415a-aa92-d648e3da803d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.926306] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 31e84206-e583-4610-969e-2ccae2d0b206 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.926541] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance fb5d60fa-fa13-44a1-8291-4645761a0c80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.926772] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 14af9f82-525e-453c-8dc5-ef5b13c67ee4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.926838] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 29b6caa8-a07c-494b-b776-b08affa45c87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.926965] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 1e172f73-972e-4401-b358-512f7e03b27f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.927100] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e62f49f0-370d-4b5d-ab43-72e0e6238432 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.927217] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e28ba013-0bc5-4edc-858d-674980bc8742 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.927330] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 037f539f-1bf1-4897-81b3-08c377b92211 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.927527] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 6b96988b-cc79-41d7-a17d-277ae5aeb4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.927648] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 12b99b2b-56f0-4ce9-8897-f429c2084f38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.927793] env[63241]: WARNING nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance d1abe122-0259-4f6e-b363-d7c0b1ae7a69 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1817.927954] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0e4a3b3a-4464-404f-9154-1ab6f97ae951 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.928132] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 01af6dc5-e0e7-4f8b-ad07-73af80c32577 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.928236] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance d7d5b5a1-bfe9-43a1-b8f1-0a0048562530 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.928349] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 6055a56d-1e0d-47bc-930b-b62206a0263e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.928509] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Migration 9b0fd3a6-0f57-4279-b77c-70066c90abbc is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1817.928622] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 49d350ff-4932-4759-a3fa-53274c484ae6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1817.956493] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.001743] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52cd386a-9e9f-b95d-7b63-e7711405dbaf, 'name': SearchDatastore_Task, 'duration_secs': 0.098006} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.002153] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.002425] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 6055a56d-1e0d-47bc-930b-b62206a0263e/6055a56d-1e0d-47bc-930b-b62206a0263e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1818.002700] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cdece6e5-61e6-4185-a0f8-ed7b2d9ac608 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.011920] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1818.011920] env[63241]: value = "task-1821022" [ 1818.011920] env[63241]: _type = "Task" [ 1818.011920] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.021911] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821022, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.070510] env[63241]: DEBUG oslo_vmware.api [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821018, 'name': PowerOffVM_Task, 'duration_secs': 1.074685} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.070788] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1818.071061] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1818.071391] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e757b6b5-60b8-4717-afc3-10c4c60cb85d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.220913] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.221146] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.221329] env[63241]: DEBUG nova.network.neutron [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1818.221510] env[63241]: DEBUG nova.objects.instance [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'info_cache' on Instance uuid e28ba013-0bc5-4edc-858d-674980bc8742 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1818.306789] env[63241]: DEBUG nova.network.neutron [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating instance_info_cache with network_info: [{"id": "7a0be842-edfe-48ff-9275-dbb260c7e781", "address": "fa:16:3e:aa:cc:cf", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0be842-ed", "ovs_interfaceid": "7a0be842-edfe-48ff-9275-dbb260c7e781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.318351] env[63241]: DEBUG oslo_vmware.api [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821020, 'name': ReconfigVM_Task, 'duration_secs': 0.226314} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.318937] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377197', 'volume_id': '5ab7cf9d-57ae-427f-a604-75e49549ee99', 'name': 'volume-5ab7cf9d-57ae-427f-a604-75e49549ee99', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '037f539f-1bf1-4897-81b3-08c377b92211', 'attached_at': '', 'detached_at': '', 'volume_id': '5ab7cf9d-57ae-427f-a604-75e49549ee99', 'serial': '5ab7cf9d-57ae-427f-a604-75e49549ee99'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1818.393336] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821016, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.210436} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.393570] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] d7d5b5a1-bfe9-43a1-b8f1-0a0048562530/d7d5b5a1-bfe9-43a1-b8f1-0a0048562530.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1818.393814] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1818.394109] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d51441aa-6c74-432d-9df3-71a37cb31e48 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.403575] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1818.403575] env[63241]: value = "task-1821024" [ 1818.403575] env[63241]: _type = "Task" [ 1818.403575] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.418499] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821024, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.422194] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821021, 'name': ReconfigVM_Task, 'duration_secs': 0.356293} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.422579] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951/0e4a3b3a-4464-404f-9154-1ab6f97ae951.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1818.424766] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5fa1ad0c-a4fd-4bf4-8da1-cd4e37934a2c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.430181] env[63241]: DEBUG nova.network.neutron [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance_info_cache with network_info: [{"id": "1c88bbab-4bd7-4ea5-858c-317020381bac", "address": "fa:16:3e:b5:7a:04", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c88bbab-4b", "ovs_interfaceid": "1c88bbab-4bd7-4ea5-858c-317020381bac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.432433] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 779d2380-be6c-4fdb-8755-10e99f8a6fd9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1818.432746] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1818.433605] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1818.438233] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1818.438233] env[63241]: value = "task-1821025" [ 1818.438233] env[63241]: _type = "Task" [ 1818.438233] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.451989] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821025, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.458972] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821019, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.475155] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "6f3cbd23-30b9-4502-be07-2edd0a701291" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.475155] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "6f3cbd23-30b9-4502-be07-2edd0a701291" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.528389] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821022, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.725092] env[63241]: DEBUG nova.objects.base [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1818.734833] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9015f7ce-cc43-47af-839a-b8cb34ab8cb8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.745234] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8ecc36-d365-48b5-8fae-353dd3410306 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.781176] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8340d9d-60ce-4360-af6a-dc08bfae544a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.790653] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd07688-c707-4a01-8759-a3880c456c4d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.806097] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1818.809646] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.838714] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='59db778773e471cda54789021d3f543e',container_format='bare',created_at=2024-12-12T15:30:19Z,direct_url=,disk_format='vmdk',id=509bfeca-5406-4a2d-b9c1-64cb54f16cd4,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-213583440-shelved',owner='5d257d51a2254f5386fd3348602e5b71',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2024-12-12T15:30:37Z,virtual_size=,visibility=), 
allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1818.839124] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1818.839302] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1818.839490] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1818.839641] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1818.839885] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1818.840154] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1818.840332] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1818.840510] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1818.840674] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1818.840850] env[63241]: DEBUG nova.virt.hardware [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1818.841764] 
env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54715ca2-28ec-48e6-a4c3-419fc883196b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.850621] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b482e7a-0804-4ef2-82cd-56029c2ca2ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.867716] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:cc:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a0be842-edfe-48ff-9275-dbb260c7e781', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1818.875537] env[63241]: DEBUG oslo.service.loopingcall [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.876750] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1818.877009] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73460dc0-479d-4df5-9cdd-8b4f27487ac7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.899354] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1818.899354] env[63241]: value = "task-1821026" [ 1818.899354] env[63241]: _type = "Task" [ 1818.899354] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.908546] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.917401] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821024, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203043} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.917717] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1818.918599] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2107136a-f181-4812-aae9-26c9d13ae2c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.945094] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] d7d5b5a1-bfe9-43a1-b8f1-0a0048562530/d7d5b5a1-bfe9-43a1-b8f1-0a0048562530.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1818.946831] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.948213] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41fca6fb-59fb-4283-a4f0-09d8a2b2ebf5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.964972] env[63241]: DEBUG nova.compute.manager [req-f35568bd-146c-4df5-8a5f-47d28d62aa15 req-81e8b8df-0001-4faa-ae26-cd0190d30192 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received event network-changed-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1818.965201] env[63241]: DEBUG nova.compute.manager [req-f35568bd-146c-4df5-8a5f-47d28d62aa15 req-81e8b8df-0001-4faa-ae26-cd0190d30192 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Refreshing instance network info cache due to event network-changed-7a0be842-edfe-48ff-9275-dbb260c7e781. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1818.965494] env[63241]: DEBUG oslo_concurrency.lockutils [req-f35568bd-146c-4df5-8a5f-47d28d62aa15 req-81e8b8df-0001-4faa-ae26-cd0190d30192 service nova] Acquiring lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.965625] env[63241]: DEBUG oslo_concurrency.lockutils [req-f35568bd-146c-4df5-8a5f-47d28d62aa15 req-81e8b8df-0001-4faa-ae26-cd0190d30192 service nova] Acquired lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.966134] env[63241]: DEBUG nova.network.neutron [req-f35568bd-146c-4df5-8a5f-47d28d62aa15 req-81e8b8df-0001-4faa-ae26-cd0190d30192 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Refreshing network info cache for port 7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1818.993111] env[63241]: DEBUG nova.compute.manager [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1818.993999] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821019, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.644781} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.994326] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1818.994326] env[63241]: value = "task-1821027" [ 1818.994326] env[63241]: _type = "Task" [ 1818.994326] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.994536] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821025, 'name': Rename_Task, 'duration_secs': 0.171855} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.994781] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 1e172f73-972e-4401-b358-512f7e03b27f/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. 
[ 1818.995148] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1818.999020] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9346f4f-7336-4a1f-a472-27d67226941e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.002208] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-984206d5-395a-4707-bac6-6adc8499e052 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.035593] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821027, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.048176] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 1e172f73-972e-4401-b358-512f7e03b27f/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1819.052472] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-393f0e95-dc7d-4615-a262-bf1ae2e292fe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.065578] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1819.065578] env[63241]: value = "task-1821028" [ 1819.065578] env[63241]: _type = "Task" [ 1819.065578] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.073738] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821022, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.977467} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.075386] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 6055a56d-1e0d-47bc-930b-b62206a0263e/6055a56d-1e0d-47bc-930b-b62206a0263e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1819.075595] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1819.075910] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1819.075910] env[63241]: value = "task-1821029" [ 1819.075910] env[63241]: _type = "Task" [ 1819.075910] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.076137] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc82b8b9-70df-4e7b-8981-e535ce23aa3e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.083972] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821028, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.090323] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821029, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.091727] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1819.091727] env[63241]: value = "task-1821030" [ 1819.091727] env[63241]: _type = "Task" [ 1819.091727] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.330890] env[63241]: ERROR nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [req-65fa1a11-ed05-4bdf-a0a2-14b630ca856c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-65fa1a11-ed05-4bdf-a0a2-14b630ca856c"}]} [ 1819.351543] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1819.366466] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1819.366802] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1819.377293] env[63241]: DEBUG nova.objects.instance [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lazy-loading 'flavor' on Instance uuid 037f539f-1bf1-4897-81b3-08c377b92211 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1819.380034] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1819.399811] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1819.414076] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.514600] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821027, 'name': ReconfigVM_Task, 'duration_secs': 0.401032} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.515508] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Reconfigured VM instance instance-00000064 to attach disk [datastore1] d7d5b5a1-bfe9-43a1-b8f1-0a0048562530/d7d5b5a1-bfe9-43a1-b8f1-0a0048562530.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1819.516171] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ee71f79-52ea-4bee-a66c-e5d91adb47a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.523011] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.530014] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1819.530014] env[63241]: value = "task-1821031" [ 1819.530014] env[63241]: _type = "Task" [ 1819.530014] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.542955] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821031, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.579279] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821028, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.588329] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821029, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.603250] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821030, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177306} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.603528] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1819.606675] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4175f4-2210-46b2-8bb4-09002d3a996e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.629640] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 6055a56d-1e0d-47bc-930b-b62206a0263e/6055a56d-1e0d-47bc-930b-b62206a0263e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1819.632335] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6d1161b-f5f3-4f57-8273-5dfe2f5180d8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.653149] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1819.653149] env[63241]: value = "task-1821032" [ 1819.653149] env[63241]: _type = "Task" [ 1819.653149] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.663910] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821032, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.724687] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a28e8d1-9c91-4264-93cb-2cc722563b7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.732729] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e8325b-6503-4c96-bb9c-8b05dd66d2b8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.768098] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9687df48-2945-4e91-a837-4b229fff776b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.776172] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b873d24a-463e-455a-8180-6996fd7848ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.789856] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1819.792249] env[63241]: DEBUG nova.network.neutron [req-f35568bd-146c-4df5-8a5f-47d28d62aa15 req-81e8b8df-0001-4faa-ae26-cd0190d30192 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updated VIF entry in instance network info cache for port 7a0be842-edfe-48ff-9275-dbb260c7e781. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1819.792581] env[63241]: DEBUG nova.network.neutron [req-f35568bd-146c-4df5-8a5f-47d28d62aa15 req-81e8b8df-0001-4faa-ae26-cd0190d30192 service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating instance_info_cache with network_info: [{"id": "7a0be842-edfe-48ff-9275-dbb260c7e781", "address": "fa:16:3e:aa:cc:cf", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0be842-ed", "ovs_interfaceid": "7a0be842-edfe-48ff-9275-dbb260c7e781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1819.882867] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e95c8d85-63d6-4077-b29a-8b2549e5f655 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.391s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.913481] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.964907] env[63241]: DEBUG nova.network.neutron [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.041027] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821031, 'name': Rename_Task, 'duration_secs': 0.190805} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.041173] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1820.041621] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d70214b5-f49b-467b-9f93-532b0b898c0b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.048576] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1820.048576] env[63241]: value = "task-1821033" [ 1820.048576] env[63241]: _type = "Task" [ 1820.048576] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.056959] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821033, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.076466] env[63241]: DEBUG oslo_vmware.api [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821028, 'name': PowerOnVM_Task, 'duration_secs': 0.725492} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.077026] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1820.092001] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821029, 'name': ReconfigVM_Task, 'duration_secs': 0.649614} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.092589] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 1e172f73-972e-4401-b358-512f7e03b27f/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1820.093695] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18aa0f60-9187-4265-814f-2f936cccae2a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.127756] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94b76f45-0858-4bb1-baf8-29108ef441a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.144134] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1820.144134] env[63241]: value = "task-1821034" [ 1820.144134] env[63241]: _type = "Task" [ 1820.144134] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.154241] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821034, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.164454] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821032, 'name': ReconfigVM_Task, 'duration_secs': 0.31511} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.165055] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 6055a56d-1e0d-47bc-930b-b62206a0263e/6055a56d-1e0d-47bc-930b-b62206a0263e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1820.165502] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-13095d9f-3eba-436b-8408-c7168489a8e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.175222] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1820.175222] env[63241]: value = "task-1821035" [ 1820.175222] env[63241]: _type = "Task" [ 1820.175222] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.183994] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821035, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.197688] env[63241]: DEBUG nova.compute.manager [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1820.198628] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4beb677d-1569-43c5-8f45-3e83329d9de2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.296532] env[63241]: DEBUG oslo_concurrency.lockutils [req-f35568bd-146c-4df5-8a5f-47d28d62aa15 req-81e8b8df-0001-4faa-ae26-cd0190d30192 service nova] Releasing lock "refresh_cache-01af6dc5-e0e7-4f8b-ad07-73af80c32577" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.329384] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 150 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1820.329585] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 150 to 151 during 
operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1820.329739] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1820.413794] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.467762] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.481081] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e182e57-202c-4bb7-b623-12b7ada3c7a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.500345] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance '49d350ff-4932-4759-a3fa-53274c484ae6' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1820.558615] env[63241]: DEBUG oslo_vmware.api [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821033, 'name': PowerOnVM_Task, 'duration_secs': 0.477103} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.558938] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1820.559220] env[63241]: INFO nova.compute.manager [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Took 11.97 seconds to spawn the instance on the hypervisor. 
[ 1820.559449] env[63241]: DEBUG nova.compute.manager [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1820.560297] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16adb638-9d96-49ba-8422-eebcfcf89d57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.655589] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821034, 'name': ReconfigVM_Task, 'duration_secs': 0.417983} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.655866] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1820.656189] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e329ead1-1704-4084-9633-4e35ce2faf51 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.663375] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1820.663375] env[63241]: value = "task-1821036" [ 1820.663375] env[63241]: _type = "Task" [ 1820.663375] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.673729] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.682453] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821035, 'name': Rename_Task, 'duration_secs': 0.187896} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.682733] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1820.682974] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-581dc348-226e-42c3-b04f-9dd40db1ee71 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.689050] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1820.689050] env[63241]: value = "task-1821037" [ 1820.689050] env[63241]: _type = "Task" [ 1820.689050] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.696542] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821037, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.721045] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb310e4f-e5e6-4111-9371-b337adfb6a97 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 26.900s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.773884] env[63241]: DEBUG oslo_concurrency.lockutils [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "037f539f-1bf1-4897-81b3-08c377b92211" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.774299] env[63241]: DEBUG oslo_concurrency.lockutils [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.835592] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1820.835825] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.990s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1820.836113] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.422s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.837699] env[63241]: INFO nova.compute.claims [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1820.914317] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.971351] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1820.971677] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1eaa8d4c-3528-4a7a-b4b3-c8bf1824bbad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.980951] env[63241]: DEBUG oslo_vmware.api [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1820.980951] env[63241]: value = "task-1821038" [ 1820.980951] env[63241]: _type = "Task" [ 1820.980951] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.989962] env[63241]: DEBUG oslo_vmware.api [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821038, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.007194] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1821.007504] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-702337a3-a498-476d-9d5c-d457b5354cb4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.015823] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1821.015823] env[63241]: value = "task-1821039" [ 1821.015823] env[63241]: _type = "Task" [ 1821.015823] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.029055] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821039, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.079742] env[63241]: INFO nova.compute.manager [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Took 24.74 seconds to build instance. [ 1821.179310] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821036, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.204435] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821037, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.279610] env[63241]: INFO nova.compute.manager [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Detaching volume 5ab7cf9d-57ae-427f-a604-75e49549ee99 [ 1821.330168] env[63241]: INFO nova.virt.block_device [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Attempting to driver detach volume 5ab7cf9d-57ae-427f-a604-75e49549ee99 from mountpoint /dev/sdb [ 1821.330420] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1821.330625] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377197', 'volume_id': '5ab7cf9d-57ae-427f-a604-75e49549ee99', 'name': 'volume-5ab7cf9d-57ae-427f-a604-75e49549ee99', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '037f539f-1bf1-4897-81b3-08c377b92211', 'attached_at': '', 'detached_at': '', 'volume_id': '5ab7cf9d-57ae-427f-a604-75e49549ee99', 'serial': '5ab7cf9d-57ae-427f-a604-75e49549ee99'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1821.331599] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fff88f-6de7-4f2e-b33e-c65a9e0e66d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.361052] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b393ab0-adcb-4ac7-9e88-8f41bd9df7bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.368341] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612e7173-a8a5-4a58-beff-95b3f7c86c55 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.394699] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e271cd16-71d3-4e42-abf9-f4f9598ec415 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.414655] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] The volume has not been displaced from its original location: [datastore1] volume-5ab7cf9d-57ae-427f-a604-75e49549ee99/volume-5ab7cf9d-57ae-427f-a604-75e49549ee99.vmdk. No consolidation needed. {{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1821.420527] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Reconfiguring VM instance instance-0000005d to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1821.424412] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0239eeb9-508c-49e5-86b8-9a1fd910e167 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.444324] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.446130] env[63241]: DEBUG oslo_vmware.api [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1821.446130] env[63241]: value = "task-1821040" [ 1821.446130] env[63241]: _type = "Task" [ 1821.446130] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.454947] env[63241]: DEBUG oslo_vmware.api [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821040, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.491520] env[63241]: DEBUG oslo_vmware.api [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821038, 'name': PowerOnVM_Task, 'duration_secs': 0.48952} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.491811] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1821.492114] env[63241]: DEBUG nova.compute.manager [None req-e9e21b96-7725-4d8e-8c99-06f6a07c5577 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1821.492903] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19437705-ba37-49e2-952e-60b439e728af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.527902] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821039, 'name': PowerOffVM_Task, 'duration_secs': 0.23161} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.528671] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1821.528906] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance '49d350ff-4932-4759-a3fa-53274c484ae6' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1821.582561] env[63241]: DEBUG oslo_concurrency.lockutils [None req-0cfec89c-40ae-4519-87e8-b4d41ed26567 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.252s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.676728] env[63241]: DEBUG oslo_vmware.api [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821036, 'name': PowerOnVM_Task, 'duration_secs': 0.588679} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.677167] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1821.680709] env[63241]: DEBUG nova.compute.manager [None req-a9f7d4cd-6dfb-4d67-9be3-4cfdf459d26a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1821.681985] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1458b340-2648-4889-8bf8-aa66b600bdf7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.706573] env[63241]: DEBUG oslo_vmware.api [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821037, 'name': PowerOnVM_Task, 'duration_secs': 0.637196} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.706904] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1821.707169] env[63241]: INFO nova.compute.manager [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Took 10.71 seconds to spawn the instance on the hypervisor. [ 1821.707442] env[63241]: DEBUG nova.compute.manager [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1821.708580] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685203ec-99c1-4245-b97c-646c752b08c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.929980] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.959275] env[63241]: DEBUG oslo_vmware.api [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821040, 'name': ReconfigVM_Task, 'duration_secs': 0.469469} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.959947] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Reconfigured VM instance instance-0000005d to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1821.965502] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b173e45-44f6-49e4-a10e-dc53cc319351 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.988627] env[63241]: DEBUG oslo_vmware.api [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1821.988627] env[63241]: value = "task-1821041" [ 1821.988627] env[63241]: _type = "Task" [ 1821.988627] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.003272] env[63241]: DEBUG oslo_vmware.api [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821041, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.038738] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1822.039310] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1822.039643] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1822.039993] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1822.042078] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1822.042078] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1822.042078] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1822.042078] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1822.042078] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Got 1 possible 
topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1822.042078] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1822.042078] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1822.048441] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-944818dd-1215-4707-96da-5a97617793bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.076440] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1822.076440] env[63241]: value = "task-1821042" [ 1822.076440] env[63241]: _type = "Task" [ 1822.076440] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.088146] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821042, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.127986] env[63241]: DEBUG nova.compute.manager [req-b23d383f-c7c8-41c3-9ab3-6816e44e1aaa req-905fc254-15bb-42fa-9ed7-0a4c89f542a6 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received event network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1822.127986] env[63241]: DEBUG nova.compute.manager [req-b23d383f-c7c8-41c3-9ab3-6816e44e1aaa req-905fc254-15bb-42fa-9ed7-0a4c89f542a6 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing instance network info cache due to event network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1822.127986] env[63241]: DEBUG oslo_concurrency.lockutils [req-b23d383f-c7c8-41c3-9ab3-6816e44e1aaa req-905fc254-15bb-42fa-9ed7-0a4c89f542a6 service nova] Acquiring lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.128710] env[63241]: DEBUG oslo_concurrency.lockutils [req-b23d383f-c7c8-41c3-9ab3-6816e44e1aaa req-905fc254-15bb-42fa-9ed7-0a4c89f542a6 service nova] Acquired lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.128710] env[63241]: DEBUG nova.network.neutron [req-b23d383f-c7c8-41c3-9ab3-6816e44e1aaa req-905fc254-15bb-42fa-9ed7-0a4c89f542a6 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1822.229586] env[63241]: INFO nova.compute.manager [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Took 24.51 seconds to build instance. [ 1822.237214] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3352e52-bb2e-4f0e-8439-8d5d8db329ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.246277] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99a4396-2039-4df2-84cd-4eb02a0d6627 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.287651] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec9359e-b321-4c4b-881b-e7b22c230508 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.298746] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8be1e9-1efb-417d-a507-4d54e060193e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.316870] env[63241]: DEBUG nova.compute.provider_tree [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.426751] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.495590] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd228427-2500-4cf4-b380-ca13b28877a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.509950] env[63241]: DEBUG oslo_vmware.api [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821041, 'name': ReconfigVM_Task, 'duration_secs': 0.189509} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.511806] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377197', 'volume_id': '5ab7cf9d-57ae-427f-a604-75e49549ee99', 'name': 'volume-5ab7cf9d-57ae-427f-a604-75e49549ee99', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '037f539f-1bf1-4897-81b3-08c377b92211', 'attached_at': '', 'detached_at': '', 'volume_id': '5ab7cf9d-57ae-427f-a604-75e49549ee99', 'serial': '5ab7cf9d-57ae-427f-a604-75e49549ee99'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1822.514089] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5153dc73-6df8-474d-92d0-7b32636eb7b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Suspending the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1822.514540] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-42be9054-fc60-497c-b22c-09eace833911 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.522392] env[63241]: DEBUG oslo_vmware.api [None req-5153dc73-6df8-474d-92d0-7b32636eb7b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1822.522392] env[63241]: value = "task-1821043" [ 1822.522392] env[63241]: _type = "Task" [ 1822.522392] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.532177] env[63241]: DEBUG oslo_vmware.api [None req-5153dc73-6df8-474d-92d0-7b32636eb7b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821043, 'name': SuspendVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.588225] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821042, 'name': ReconfigVM_Task, 'duration_secs': 0.286294} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.588625] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance '49d350ff-4932-4759-a3fa-53274c484ae6' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1822.731724] env[63241]: DEBUG oslo_concurrency.lockutils [None req-78f02d34-f879-4a36-bfc4-5df54196146d tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.026s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.820799] env[63241]: DEBUG nova.scheduler.client.report [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1822.936597] env[63241]: INFO nova.compute.manager [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Unrescuing [ 1822.936991] env[63241]: DEBUG oslo_concurrency.lockutils [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.937212] env[63241]: DEBUG oslo_concurrency.lockutils [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquired lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.937418] env[63241]: DEBUG nova.network.neutron [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1822.942626] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.008410] env[63241]: DEBUG nova.network.neutron [req-b23d383f-c7c8-41c3-9ab3-6816e44e1aaa req-905fc254-15bb-42fa-9ed7-0a4c89f542a6 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updated VIF entry in instance network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1823.008809] env[63241]: DEBUG nova.network.neutron [req-b23d383f-c7c8-41c3-9ab3-6816e44e1aaa req-905fc254-15bb-42fa-9ed7-0a4c89f542a6 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.036528] env[63241]: DEBUG oslo_vmware.api [None req-5153dc73-6df8-474d-92d0-7b32636eb7b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821043, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.065914] env[63241]: DEBUG nova.objects.instance [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lazy-loading 'flavor' on Instance uuid 037f539f-1bf1-4897-81b3-08c377b92211 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1823.095182] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1823.095455] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1823.095662] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1823.095904] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1823.096145] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1823.096353] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1823.096642] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1823.096950] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 
tempest-DeleteServersTestJSON-1966896593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1823.097230] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1823.097468] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1823.097720] env[63241]: DEBUG nova.virt.hardware [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1823.104495] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Reconfiguring VM instance instance-00000062 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1823.105939] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f47b008-6444-44af-83aa-a45dd25dfe1f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.128611] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1823.128611] env[63241]: value = "task-1821044" [ 1823.128611] env[63241]: _type = "Task" [ 1823.128611] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.139090] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821044, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.330026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.492s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.330026] env[63241]: DEBUG nova.compute.manager [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1823.336009] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.828s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.340236] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.340236] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.816s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.342258] env[63241]: INFO nova.compute.claims [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1823.382309] env[63241]: INFO nova.scheduler.client.report [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Deleted allocations for instance d1abe122-0259-4f6e-b363-d7c0b1ae7a69 [ 1823.431107] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.514488] env[63241]: DEBUG oslo_concurrency.lockutils [req-b23d383f-c7c8-41c3-9ab3-6816e44e1aaa req-905fc254-15bb-42fa-9ed7-0a4c89f542a6 service nova] Releasing lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.537809] env[63241]: DEBUG oslo_vmware.api [None req-5153dc73-6df8-474d-92d0-7b32636eb7b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821043, 'name': SuspendVM_Task, 'duration_secs': 0.870038} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.539080] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5153dc73-6df8-474d-92d0-7b32636eb7b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Suspended the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1823.539354] env[63241]: DEBUG nova.compute.manager [None req-5153dc73-6df8-474d-92d0-7b32636eb7b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1823.540435] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce60b92-a02a-4a1a-95ac-45729379ccad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.639861] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821044, 'name': ReconfigVM_Task, 'duration_secs': 0.235792} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.640286] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Reconfigured VM instance instance-00000062 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1823.641209] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd72d330-1414-4682-9aef-73ba48e48f34 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.668174] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 49d350ff-4932-4759-a3fa-53274c484ae6/49d350ff-4932-4759-a3fa-53274c484ae6.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1823.672616] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3652d4cd-4cbc-4eb6-b817-3387c3cfc11f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.696708] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1823.696708] env[63241]: value = "task-1821045" [ 1823.696708] env[63241]: _type = "Task" [ 1823.696708] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.709106] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821045, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.848514] env[63241]: DEBUG nova.compute.utils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1823.850276] env[63241]: DEBUG nova.compute.manager [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1823.850469] env[63241]: DEBUG nova.network.neutron [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1823.889910] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d91b242c-9162-4c5f-ae41-761c074aac7f tempest-ServerDiskConfigTestJSON-1298437658 tempest-ServerDiskConfigTestJSON-1298437658-project-member] Lock "d1abe122-0259-4f6e-b363-d7c0b1ae7a69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.535s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.928788] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.994623] env[63241]: DEBUG nova.policy [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de6df2e8caaa4c0c82c94f9d107a8e17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6964b0dd75c4704b8f5cacd2c8e355f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1824.000637] env[63241]: DEBUG nova.network.neutron [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating instance_info_cache with network_info: [{"id": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "address": "fa:16:3e:17:1d:18", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbeb829e-4c", "ovs_interfaceid": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.075231] env[63241]: DEBUG oslo_concurrency.lockutils [None req-df203fcb-b9d1-438f-b284-67439587baea tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.300s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.189203] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "6055a56d-1e0d-47bc-930b-b62206a0263e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.189503] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 
tempest-ServersTestJSON-1880776855-project-member] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.189743] env[63241]: DEBUG nova.compute.manager [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1824.190757] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a26b9d-9c58-4529-a03f-dedb05687d91 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.203509] env[63241]: DEBUG nova.compute.manager [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1824.204753] env[63241]: DEBUG nova.objects.instance [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lazy-loading 'flavor' on Instance uuid 6055a56d-1e0d-47bc-930b-b62206a0263e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1824.211453] env[63241]: DEBUG nova.compute.manager [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received event network-changed-db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1824.211661] env[63241]: DEBUG nova.compute.manager [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing instance network info cache due to event network-changed-db56e1be-f5b4-4531-8573-93fe90bc8b34. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1824.211900] env[63241]: DEBUG oslo_concurrency.lockutils [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] Acquiring lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.212089] env[63241]: DEBUG oslo_concurrency.lockutils [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] Acquired lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.212385] env[63241]: DEBUG nova.network.neutron [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing network info cache for port db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1824.222262] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821045, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.353975] env[63241]: DEBUG nova.compute.manager [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1824.432215] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.503629] env[63241]: DEBUG oslo_concurrency.lockutils [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Releasing lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.504484] env[63241]: DEBUG nova.objects.instance [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lazy-loading 'flavor' on Instance uuid 1e172f73-972e-4401-b358-512f7e03b27f {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1824.652673] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2b6dc3-8aa0-4533-ab0d-cfb21339a9d8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.662148] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b6bbee-dcaa-47fe-9a49-c2b390a14eb2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.706035] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38085edc-f7d4-47ad-b022-499cd3aafe0b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.716660] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821045, 'name': ReconfigVM_Task, 'duration_secs': 0.693452} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.719024] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 49d350ff-4932-4759-a3fa-53274c484ae6/49d350ff-4932-4759-a3fa-53274c484ae6.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1824.719209] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance '49d350ff-4932-4759-a3fa-53274c484ae6' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1824.725249] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11a3e78-e374-4d2d-8928-d2dbb27f20cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.729474] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1824.730359] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b738b8e-fbcc-4bb6-89a5-0d97bb959be3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.745259] env[63241]: DEBUG nova.compute.provider_tree [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1824.752897] env[63241]: DEBUG oslo_vmware.api [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1824.752897] env[63241]: value = "task-1821046" [ 1824.752897] env[63241]: _type = "Task" [ 1824.752897] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.766358] env[63241]: DEBUG oslo_vmware.api [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821046, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.784396] env[63241]: DEBUG nova.network.neutron [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Successfully created port: b434fe5e-e77d-4974-8bd4-7226a359e28d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1824.839635] env[63241]: DEBUG nova.network.neutron [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updated VIF entry in instance network info cache for port db56e1be-f5b4-4531-8573-93fe90bc8b34. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1824.840034] env[63241]: DEBUG nova.network.neutron [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updating instance_info_cache with network_info: [{"id": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "address": "fa:16:3e:9f:6a:40", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb56e1be-f5", "ovs_interfaceid": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.932187] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.013686] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0312e9-e0fc-42a0-b7f4-95f730ddae59 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.047377] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1825.047826] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-855e2c70-f136-418e-9290-54a7df312ce5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.066024] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1825.066024] env[63241]: value = "task-1821047" [ 1825.066024] env[63241]: _type = "Task" [ 1825.066024] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.073944] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.232651] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14428b8-5b1a-48f9-ac74-1116969b1f23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.253486] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df066a0-0f6e-4549-8eb3-cb48eaad66dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.257515] env[63241]: DEBUG nova.scheduler.client.report [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1825.270229] env[63241]: DEBUG oslo_vmware.api [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821046, 'name': PowerOffVM_Task, 'duration_secs': 0.412343} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.284858] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1825.285374] env[63241]: DEBUG nova.compute.manager [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1825.285933] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance '49d350ff-4932-4759-a3fa-53274c484ae6' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1825.291115] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe695e14-f36e-44be-aebc-df2c45f40518 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.343138] env[63241]: DEBUG oslo_concurrency.lockutils [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] Releasing lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.343739] env[63241]: DEBUG nova.compute.manager [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received event network-changed-db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1825.343739] env[63241]: DEBUG nova.compute.manager [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing instance network info cache due to event network-changed-db56e1be-f5b4-4531-8573-93fe90bc8b34. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1825.343970] env[63241]: DEBUG oslo_concurrency.lockutils [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] Acquiring lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.344031] env[63241]: DEBUG oslo_concurrency.lockutils [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] Acquired lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.344201] env[63241]: DEBUG nova.network.neutron [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing network info cache for port db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1825.365387] env[63241]: DEBUG nova.compute.manager [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1825.399275] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1825.399275] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1825.399275] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1825.399275] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1825.404286] env[63241]: DEBUG nova.virt.hardware [None 
req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1825.404286] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1825.404454] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1825.404744] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1825.404837] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1825.404998] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1825.405209] env[63241]: DEBUG nova.virt.hardware [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1825.406560] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d752351a-5e6a-410b-b959-101ad2e0da59 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.420964] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1825.420964] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1825.421124] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Deleting the datastore file [datastore1] 12b99b2b-56f0-4ce9-8897-f429c2084f38 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1825.421379] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6049264e-2fd3-4cc1-a33e-65648d8abb76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.429327] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2271b8d5-6731-4682-8af9-32010acd41d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.434310] env[63241]: INFO nova.compute.manager [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Resuming [ 1825.435341] env[63241]: DEBUG nova.objects.instance [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lazy-loading 'flavor' on Instance uuid 0e4a3b3a-4464-404f-9154-1ab6f97ae951 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1825.443478] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.455015] env[63241]: DEBUG oslo_vmware.api [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1825.455015] env[63241]: value = "task-1821048" [ 1825.455015] env[63241]: _type = "Task" [ 1825.455015] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.464723] env[63241]: DEBUG oslo_vmware.api [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821048, 'name': DeleteDatastoreFile_Task} progress is 0%. 
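The "Waiting for the task ... progress is N%" entries above come from oslo.vmware's task poller. A minimal sketch of the usual calling pattern, invoking a vSphere *_Task method through the session and blocking on wait_for_task; the session and vm_ref objects are assumed to exist already, and this is illustrative rather than the driver's exact code:

```python
def power_off(session, vm_ref):
    """Power off a VM and block until vCenter reports the task as done.

    'session' is assumed to be an existing oslo_vmware.api.VMwareAPISession and
    'vm_ref' a VirtualMachine managed-object reference.
    """
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task drives the polling seen above ("progress is N%") and raises
    # an oslo_vmware exception if vCenter marks the task as errored.
    session.wait_for_task(task)
```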
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.550565] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "426b001f-949f-4814-9c10-c7f44b6da44a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.550991] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "426b001f-949f-4814-9c10-c7f44b6da44a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.575971] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821047, 'name': PowerOffVM_Task, 'duration_secs': 0.276231} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.576083] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1825.581682] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfiguring VM instance instance-0000004e to detach disk 2002 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1825.582471] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ec9faf7-ef7a-4894-9d57-14dba8e823ad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.604815] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1825.604815] env[63241]: value = "task-1821049" [ 1825.604815] env[63241]: _type = "Task" [ 1825.604815] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.614105] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821049, 'name': ReconfigVM_Task} progress is 5%. 
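The "Acquiring lock ... acquired ... waited" and "released ... held" lines are emitted by oslo.concurrency's lockutils wrappers. A hedged sketch of the two common forms, decorator and context manager; the lock names echo the log, everything else is illustrative:

```python
from oslo_concurrency import lockutils

# Decorator form: the body runs with the named lock held, and lockutils emits
# the "acquired ... waited" / "released ... held" DEBUG lines seen above.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass

# Context-manager form, with a per-instance lock name like the
# "426b001f-..." lock in the log.
def build_instance(instance_uuid):
    with lockutils.lock(instance_uuid):
        pass
```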
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.765843] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.766499] env[63241]: DEBUG nova.compute.manager [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1825.808146] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4e18668a-8245-4e3f-8393-b8dcf7c95893 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.619s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.830687] env[63241]: DEBUG nova.network.neutron [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Port 1c88bbab-4bd7-4ea5-858c-317020381bac binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1825.931555] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821026, 'name': CreateVM_Task, 'duration_secs': 6.715708} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.931737] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1825.932409] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.932578] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.932955] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1825.933228] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47fe8d44-da4c-4f30-82cf-e2d3c43dfde8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.939895] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1825.939895] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5284ebf2-cb7e-ad9e-657d-96fbd2f325eb" [ 1825.939895] env[63241]: _type = "Task" [ 1825.939895] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.952206] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5284ebf2-cb7e-ad9e-657d-96fbd2f325eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.964608] env[63241]: DEBUG oslo_vmware.api [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821048, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.425687} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.964920] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1825.965130] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1825.965333] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1825.965530] env[63241]: INFO nova.compute.manager [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Took 9.44 seconds to destroy the instance on the hypervisor. [ 1825.965789] env[63241]: DEBUG oslo.service.loopingcall [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1825.965986] env[63241]: DEBUG nova.compute.manager [-] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1825.966096] env[63241]: DEBUG nova.network.neutron [-] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1826.053947] env[63241]: DEBUG nova.compute.manager [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1826.115105] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821049, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.122681] env[63241]: DEBUG nova.network.neutron [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updated VIF entry in instance network info cache for port db56e1be-f5b4-4531-8573-93fe90bc8b34. 
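The "Waiting for function ... _deallocate_network_with_retries to return" entry above is produced by oslo.service's looping-call machinery. A minimal sketch of the common FixedIntervalLoopingCall pattern (illustrative only, not the exact retry helper Nova wraps around network deallocation):

```python
from oslo_service import loopingcall

attempts = {'count': 0}

def _poll():
    attempts['count'] += 1
    if attempts['count'] >= 3:
        # Raising LoopingCallDone stops the loop; retvalue becomes the result.
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_poll)
result = timer.start(interval=0.1).wait()   # blocks until the loop finishes
print(result)                               # True
```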
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1826.123047] env[63241]: DEBUG nova.network.neutron [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updating instance_info_cache with network_info: [{"id": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "address": "fa:16:3e:9f:6a:40", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb56e1be-f5", "ovs_interfaceid": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.250114] env[63241]: DEBUG nova.compute.manager [req-06a9b81e-f5dd-4e8a-804e-31bba28a7536 req-b5c51ef1-e292-4580-af1b-2624b6207036 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Received event network-vif-deleted-95095173-ff26-4be0-88de-b44051605ee6 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1826.250449] env[63241]: INFO nova.compute.manager [req-06a9b81e-f5dd-4e8a-804e-31bba28a7536 req-b5c51ef1-e292-4580-af1b-2624b6207036 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Neutron deleted interface 95095173-ff26-4be0-88de-b44051605ee6; detaching it from the instance and deleting it from the info cache [ 1826.250671] env[63241]: DEBUG nova.network.neutron [req-06a9b81e-f5dd-4e8a-804e-31bba28a7536 req-b5c51ef1-e292-4580-af1b-2624b6207036 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.271953] env[63241]: DEBUG nova.compute.utils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1826.273116] env[63241]: DEBUG nova.compute.manager [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Allocating IP information in the background. 
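The instance_info_cache entries above sit on 192.168.128.0/28 subnets with the gateway at .1 and the DHCP server at .2, which leaves only 14 usable addresses; that is why the fixed IPs handed out in this run (such as .6, .8 and .14) stay in that small range. A stdlib sketch of the arithmetic:

```python
import ipaddress

subnet = ipaddress.ip_network('192.168.128.0/28')
usable = list(subnet.hosts())                 # .1 through .14
print(len(usable))                            # 14

reserved = {ipaddress.ip_address('192.168.128.1'),   # gateway (from the cache entry)
            ipaddress.ip_address('192.168.128.2')}   # dhcp_server
free_for_ports = [ip for ip in usable if ip not in reserved]
print(len(free_for_ports))                    # 12 addresses left for instance ports
```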
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1826.273300] env[63241]: DEBUG nova.network.neutron [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1826.318015] env[63241]: DEBUG nova.policy [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac0c578d40af405b8fe206fcd309cf0a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6c76b46a4cf4a32a4a1c25fb81a963d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1826.450082] env[63241]: DEBUG oslo_concurrency.lockutils [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.450277] env[63241]: DEBUG oslo_concurrency.lockutils [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquired lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.450461] env[63241]: DEBUG nova.network.neutron [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1826.459268] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.459431] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Processing image 509bfeca-5406-4a2d-b9c1-64cb54f16cd4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1826.459661] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4/509bfeca-5406-4a2d-b9c1-64cb54f16cd4.vmdk" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.459804] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4/509bfeca-5406-4a2d-b9c1-64cb54f16cd4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.460204] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.460446] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ff9dc91-2310-4031-bdc0-a75bb7638807 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.473380] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.473639] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1826.474469] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-434e67a7-c0e8-44f7-b477-31fe91017c6a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.480510] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1826.480510] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f74d82-f38f-39b6-3e77-6bd22995f816" [ 1826.480510] env[63241]: _type = "Task" [ 1826.480510] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.488833] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f74d82-f38f-39b6-3e77-6bd22995f816, 'name': SearchDatastore_Task} progress is 0%. 
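The "Policy check for network:attach_external_network failed" entry a little above is an oslo.policy enforcement against the request's credentials. A rough, self-contained sketch of such a check; the check string 'role:admin' and the trimmed credentials are illustrative only, not Nova's actual defaults:

```python
from oslo_config import cfg
from oslo_policy import policy

cfg.CONF([], project='demo')                  # minimal config initialisation
enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network',        # rule name taken from the log line
    'role:admin'))                            # illustrative check string only

creds = {'roles': ['reader', 'member'],       # trimmed from the credentials in the log
         'project_id': 'f6c76b46a4cf4a32a4a1c25fb81a963d'}
allowed = enforcer.enforce('network:attach_external_network', {}, creds)
print(allowed)   # False -> the "Policy check ... failed" DEBUG line
```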
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.575924] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.576213] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.577852] env[63241]: INFO nova.compute.claims [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1826.616340] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821049, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.626155] env[63241]: DEBUG oslo_concurrency.lockutils [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] Releasing lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.626588] env[63241]: DEBUG nova.compute.manager [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received event network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1826.626789] env[63241]: DEBUG nova.compute.manager [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing instance network info cache due to event network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1826.627008] env[63241]: DEBUG oslo_concurrency.lockutils [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] Acquiring lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.627158] env[63241]: DEBUG oslo_concurrency.lockutils [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] Acquired lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.627325] env[63241]: DEBUG nova.network.neutron [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1826.731163] env[63241]: DEBUG nova.network.neutron [-] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.753185] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8baaef96-c9d4-414d-aeb4-4cbc03ba4c20 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.765609] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc45ae19-d867-4f96-b683-9fd17186a9dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.779736] env[63241]: DEBUG nova.compute.manager [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1826.812098] env[63241]: DEBUG nova.compute.manager [req-06a9b81e-f5dd-4e8a-804e-31bba28a7536 req-b5c51ef1-e292-4580-af1b-2624b6207036 service nova] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Detach interface failed, port_id=95095173-ff26-4be0-88de-b44051605ee6, reason: Instance 12b99b2b-56f0-4ce9-8897-f429c2084f38 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1826.858564] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "49d350ff-4932-4759-a3fa-53274c484ae6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.858933] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.859050] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.999521] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Preparing fetch location {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1826.999805] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Fetch image to [datastore1] OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45/OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45.vmdk {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1826.999997] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Downloading stream optimized image 509bfeca-5406-4a2d-b9c1-64cb54f16cd4 to [datastore1] OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45/OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45.vmdk on the data store datastore1 as vApp {{(pid=63241) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1827.000200] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Downloading image file data 509bfeca-5406-4a2d-b9c1-64cb54f16cd4 to the ESX as VM named 'OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45' {{(pid=63241) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1827.020269] env[63241]: DEBUG nova.network.neutron [None req-a6d8494b-b10a-4401-982e-54a057be0105 
tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Successfully created port: 240bd985-f430-47f9-83a8-287f0c345a36 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1827.113261] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1827.113261] env[63241]: value = "resgroup-9" [ 1827.113261] env[63241]: _type = "ResourcePool" [ 1827.113261] env[63241]: }. {{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1827.113261] env[63241]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-be1c4b65-f4b2-4802-86ec-a0bca72eaa4e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.136889] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821049, 'name': ReconfigVM_Task, 'duration_secs': 1.160311} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.137973] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfigured VM instance instance-0000004e to detach disk 2002 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1827.137973] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1827.138176] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c6e48c9-493f-4f7d-92d7-238d04ec1a21 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.144235] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lease: (returnval){ [ 1827.144235] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e6addc-1f62-9c84-708f-77b405b15086" [ 1827.144235] env[63241]: _type = "HttpNfcLease" [ 1827.144235] env[63241]: } obtained for vApp import into resource pool (val){ [ 1827.144235] env[63241]: value = "resgroup-9" [ 1827.144235] env[63241]: _type = "ResourcePool" [ 1827.144235] env[63241]: }. 
{{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1827.144235] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the lease: (returnval){ [ 1827.144235] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e6addc-1f62-9c84-708f-77b405b15086" [ 1827.144235] env[63241]: _type = "HttpNfcLease" [ 1827.144235] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1827.149698] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1827.149698] env[63241]: value = "task-1821051" [ 1827.149698] env[63241]: _type = "Task" [ 1827.149698] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.157194] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1827.157194] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e6addc-1f62-9c84-708f-77b405b15086" [ 1827.157194] env[63241]: _type = "HttpNfcLease" [ 1827.157194] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1827.162250] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821051, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.233118] env[63241]: INFO nova.compute.manager [-] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Took 1.27 seconds to deallocate network for instance. [ 1827.485873] env[63241]: DEBUG nova.network.neutron [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updated VIF entry in instance network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1827.487455] env[63241]: DEBUG nova.network.neutron [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.559634] env[63241]: DEBUG nova.network.neutron [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Successfully updated port: b434fe5e-e77d-4974-8bd4-7226a359e28d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1827.667434] env[63241]: DEBUG oslo_vmware.api [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821051, 'name': PowerOnVM_Task, 'duration_secs': 0.41819} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.668046] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1827.668046] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e6addc-1f62-9c84-708f-77b405b15086" [ 1827.668046] env[63241]: _type = "HttpNfcLease" [ 1827.668046] env[63241]: } is initializing. 
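The HttpNfcLease obtained for the ImportVApp above starts out "initializing" and is polled until it becomes "ready"; only then does it expose the HTTP device URL that the stream-optimized VMDK is pushed to. A minimal sketch of that wait, assuming an existing oslo.vmware session and lease reference:

```python
from oslo_vmware import vim_util

def wait_and_get_upload_url(session, lease):
    """Block until the import lease is ready, then return its first device URL.

    'session' is assumed to be an oslo_vmware.api.VMwareAPISession and 'lease'
    the HttpNfcLease returned by the ImportVApp call above.
    """
    session.wait_for_lease_ready(lease)        # drives the "is initializing" polling
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    # The stream-optimized VMDK bytes are written to this HTTP endpoint.
    return lease_info.deviceUrl[0].url
```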
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1827.668595] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1827.668944] env[63241]: DEBUG nova.compute.manager [None req-768c3c65-d3f3-416b-9fa5-17f7eff28c4f tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1827.670046] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b0114b-4c92-4727-a4d5-b9b8d1dfe1d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.710173] env[63241]: DEBUG nova.network.neutron [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating instance_info_cache with network_info: [{"id": "24131a23-55e1-4bd6-8813-5768da05438f", "address": "fa:16:3e:fa:8e:d4", "network": {"id": "355e2d29-1968-4065-94a6-f9e5946a75c5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-154610021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d1a62ae45c74a7ba071363005b3a52e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24131a23-55", "ovs_interfaceid": "24131a23-55e1-4bd6-8813-5768da05438f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.742770] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.792763] env[63241]: DEBUG nova.compute.manager [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1827.823699] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1827.823954] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1827.824180] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1827.826557] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1827.826557] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1827.826557] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1827.826557] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1827.826557] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1827.826557] 
env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1827.826557] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1827.826557] env[63241]: DEBUG nova.virt.hardware [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1827.827015] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8c6a42-8be7-43b5-91a6-6e4ebee5fbff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.838523] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16af07c1-7a60-4fa0-a64f-6b69d9974b3c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.927480] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.927674] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.928101] env[63241]: DEBUG nova.network.neutron [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1827.990176] env[63241]: DEBUG oslo_concurrency.lockutils [req-07f9d39d-4bcb-45aa-a88a-99e488903b8d req-1d40c120-267d-464d-bcc2-1be5923c597d service nova] Releasing lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.019668] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e761c932-9edc-472f-8887-1861f6d3e390 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.027953] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fffb769-7a1b-4e67-9eaa-c9eb15ad01c6 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.059275] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684c0167-2c61-4bf1-861d-098894710f6b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.064381] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "refresh_cache-779d2380-be6c-4fdb-8755-10e99f8a6fd9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.064381] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "refresh_cache-779d2380-be6c-4fdb-8755-10e99f8a6fd9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.064381] env[63241]: DEBUG nova.network.neutron [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1828.070196] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f084a8f1-1a32-4d05-b391-cc987271b6e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.087036] env[63241]: DEBUG nova.compute.provider_tree [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.152897] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1828.152897] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e6addc-1f62-9c84-708f-77b405b15086" [ 1828.152897] env[63241]: _type = "HttpNfcLease" [ 1828.152897] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1828.153416] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1828.153416] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e6addc-1f62-9c84-708f-77b405b15086" [ 1828.153416] env[63241]: _type = "HttpNfcLease" [ 1828.153416] env[63241]: }. 
{{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1828.153963] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c87cb6-b647-4fe8-9658-63b73bfc204f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.161922] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5262412d-5a9b-6404-b931-4eb6281ffa07/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1828.162125] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5262412d-5a9b-6404-b931-4eb6281ffa07/disk-0.vmdk. {{(pid=63241) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1828.221899] env[63241]: DEBUG oslo_concurrency.lockutils [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "6055a56d-1e0d-47bc-930b-b62206a0263e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.222148] env[63241]: DEBUG oslo_concurrency.lockutils [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.222356] env[63241]: DEBUG oslo_concurrency.lockutils [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "6055a56d-1e0d-47bc-930b-b62206a0263e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.222540] env[63241]: DEBUG oslo_concurrency.lockutils [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.222708] env[63241]: DEBUG oslo_concurrency.lockutils [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.224589] env[63241]: DEBUG oslo_concurrency.lockutils [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Releasing lock "refresh_cache-0e4a3b3a-4464-404f-9154-1ab6f97ae951" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.228164] env[63241]: INFO nova.compute.manager [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Terminating instance [ 1828.228985] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1347357d-de17-4214-ac76-f69eaf21eaff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.235305] env[63241]: DEBUG nova.compute.manager [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1828.235526] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1828.236858] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df89c134-cbe7-4b6c-96b2-09f2282a6135 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.242185] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-18889610-88ea-4d9f-994f-c8e9615a227d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.246796] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Resuming the VM {{(pid=63241) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1828.249577] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e76488b2-eff2-46e7-a63c-6d0e6f03909b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.251341] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1828.251856] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83872076-c632-4c4c-b4c6-b6ca06cd89f6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.262742] env[63241]: DEBUG oslo_vmware.api [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 
tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1828.262742] env[63241]: value = "task-1821052" [ 1828.262742] env[63241]: _type = "Task" [ 1828.262742] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.272334] env[63241]: DEBUG oslo_vmware.api [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.277263] env[63241]: DEBUG nova.compute.manager [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Received event network-vif-plugged-b434fe5e-e77d-4974-8bd4-7226a359e28d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1828.277381] env[63241]: DEBUG oslo_concurrency.lockutils [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] Acquiring lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.277609] env[63241]: DEBUG oslo_concurrency.lockutils [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.277791] env[63241]: DEBUG oslo_concurrency.lockutils [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.277985] env[63241]: DEBUG nova.compute.manager [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] No waiting events found dispatching network-vif-plugged-b434fe5e-e77d-4974-8bd4-7226a359e28d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1828.278173] env[63241]: WARNING nova.compute.manager [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Received unexpected event network-vif-plugged-b434fe5e-e77d-4974-8bd4-7226a359e28d for instance with vm_state building and task_state spawning. 
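The PowerOnVM_Task, DeleteDatastoreFile_Task and MoveVirtualDisk_Task entries around here all follow oslo.vmware's task-polling pattern: the caller invokes a vSphere method that returns a Task managed object, then blocks in the session's wait_for_task(), which produces the "Waiting for the task ... to complete" and periodic "progress is N%" lines logged from api.py. A minimal sketch of that pattern follows; the vCenter host, credentials and VM moref value are placeholders for illustration, not values taken from this log.

```python
# Illustrative sketch of the oslo.vmware task-polling pattern seen in the
# PowerOnVM_Task / DeleteDatastoreFile_Task entries above. The host,
# credentials and moref value are placeholders.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.test',                # placeholder vCenter host
    'administrator@vsphere.local',    # placeholder user
    'secret',                         # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)           # cadence of the "progress is N%" polls

# Build a moref for an existing VM (placeholder value) and power it on.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task object and raises on failure; the
# "Waiting for the task ... to complete" and "progress is N%" DEBUG lines
# in this log come from that loop inside oslo_vmware.api.
task_info = session.wait_for_task(task)
print(task_info.state)
```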
[ 1828.278361] env[63241]: DEBUG nova.compute.manager [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Received event network-changed-b434fe5e-e77d-4974-8bd4-7226a359e28d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1828.278525] env[63241]: DEBUG nova.compute.manager [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Refreshing instance network info cache due to event network-changed-b434fe5e-e77d-4974-8bd4-7226a359e28d. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1828.278725] env[63241]: DEBUG oslo_concurrency.lockutils [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] Acquiring lock "refresh_cache-779d2380-be6c-4fdb-8755-10e99f8a6fd9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.528499] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1828.528743] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1828.528976] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleting the datastore file [datastore1] 6055a56d-1e0d-47bc-930b-b62206a0263e {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1828.529421] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d252db1-2705-4828-ab96-99776e0c3652 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.538345] env[63241]: DEBUG oslo_vmware.api [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1828.538345] env[63241]: value = "task-1821054" [ 1828.538345] env[63241]: _type = "Task" [ 1828.538345] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.547904] env[63241]: DEBUG oslo_vmware.api [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821054, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.591173] env[63241]: DEBUG nova.scheduler.client.report [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1828.642660] env[63241]: DEBUG nova.network.neutron [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1828.776110] env[63241]: DEBUG oslo_vmware.api [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821052, 'name': PowerOnVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.054306] env[63241]: DEBUG oslo_vmware.api [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821054, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188196} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.056260] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1829.056526] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1829.056718] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1829.056951] env[63241]: INFO nova.compute.manager [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Took 0.82 seconds to destroy the instance on the hypervisor. 
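The recurring "Acquiring lock ... by ..." / "Lock ... acquired ... waited N.NNNs" / "Lock ... released ... held N.NNNs" triplets in these entries are emitted by oslo.concurrency's lock wrapper, which serializes callers on a named semaphore and logs the wait and hold times. A minimal sketch of that pattern follows; the lock name and function are illustrative stand-ins, not Nova code.

```python
# Illustrative sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock" / "acquired ... waited" / "released ... held" DEBUG
# lines above. The lock name and function body are placeholders.
import logging
import time

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)  # surface lockutils DEBUG messages

@lockutils.synchronized('compute_resources')
def update_usage():
    # Critical section: one caller per process holds the named semaphore
    # at a time; lockutils logs how long each caller waited and held it.
    time.sleep(0.1)

update_usage()

# The same lock can also be taken explicitly as a context manager.
with lockutils.lock('compute_resources'):
    pass
```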
[ 1829.057239] env[63241]: DEBUG oslo.service.loopingcall [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1829.057447] env[63241]: DEBUG nova.compute.manager [-] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1829.057551] env[63241]: DEBUG nova.network.neutron [-] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1829.097378] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.098036] env[63241]: DEBUG nova.compute.manager [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1829.103582] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.361s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.103843] env[63241]: DEBUG nova.objects.instance [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lazy-loading 'resources' on Instance uuid 12b99b2b-56f0-4ce9-8897-f429c2084f38 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1829.278948] env[63241]: DEBUG oslo_vmware.api [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821052, 'name': PowerOnVM_Task, 'duration_secs': 0.698939} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.280835] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Resumed the VM {{(pid=63241) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1829.281423] env[63241]: DEBUG nova.compute.manager [None req-10470cfd-b6ad-4fc2-b4e6-1bc9b8bdb2c9 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1829.281905] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc634e36-52e2-4e38-9e4e-69dafb12738a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.430284] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Completed reading data from the image iterator. {{(pid=63241) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1829.430752] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5262412d-5a9b-6404-b931-4eb6281ffa07/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1829.431678] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716242c1-e259-4bc8-a58f-cd09ac41defa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.443264] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5262412d-5a9b-6404-b931-4eb6281ffa07/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1829.443377] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5262412d-5a9b-6404-b931-4eb6281ffa07/disk-0.vmdk. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1829.444672] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-e2628189-b828-470b-9730-9235c4958146 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.447748] env[63241]: DEBUG nova.compute.manager [req-31800c0b-f863-4184-8438-4aa3228597e7 req-61ffdf1b-f4e3-4b98-ba7d-824c97529c01 service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Received event network-vif-deleted-93ca840a-01ca-4805-8371-2a7fee63b9ee {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1829.447973] env[63241]: INFO nova.compute.manager [req-31800c0b-f863-4184-8438-4aa3228597e7 req-61ffdf1b-f4e3-4b98-ba7d-824c97529c01 service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Neutron deleted interface 93ca840a-01ca-4805-8371-2a7fee63b9ee; detaching it from the instance and deleting it from the info cache [ 1829.448211] env[63241]: DEBUG nova.network.neutron [req-31800c0b-f863-4184-8438-4aa3228597e7 req-61ffdf1b-f4e3-4b98-ba7d-824c97529c01 service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.530575] env[63241]: DEBUG nova.network.neutron [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Updating instance_info_cache with network_info: [{"id": "b434fe5e-e77d-4974-8bd4-7226a359e28d", "address": "fa:16:3e:27:f8:85", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb434fe5e-e7", "ovs_interfaceid": "b434fe5e-e77d-4974-8bd4-7226a359e28d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.587962] env[63241]: DEBUG nova.network.neutron [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance_info_cache with network_info: [{"id": "1c88bbab-4bd7-4ea5-858c-317020381bac", "address": "fa:16:3e:b5:7a:04", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": 
[], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c88bbab-4b", "ovs_interfaceid": "1c88bbab-4bd7-4ea5-858c-317020381bac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.606962] env[63241]: DEBUG nova.compute.utils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1829.612801] env[63241]: DEBUG nova.compute.manager [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1829.613893] env[63241]: DEBUG nova.network.neutron [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1829.728584] env[63241]: DEBUG nova.policy [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2f5065726be41378be3b5bc0198f9fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08e0c8d883004d0fb18507be072eb781', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1829.735204] env[63241]: DEBUG oslo_vmware.rw_handles [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5262412d-5a9b-6404-b931-4eb6281ffa07/disk-0.vmdk. 
{{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1829.735564] env[63241]: INFO nova.virt.vmwareapi.images [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Downloaded image file data 509bfeca-5406-4a2d-b9c1-64cb54f16cd4 [ 1829.736429] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fc481a-7285-4366-87d4-74847b5cea73 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.756324] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf96a38a-6a8b-4e57-85e4-dda60d8d422b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.853130] env[63241]: DEBUG nova.network.neutron [-] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.882865] env[63241]: INFO nova.virt.vmwareapi.images [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] The imported VM was unregistered [ 1829.885966] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Caching image {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1829.886236] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating directory with path [datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1829.889287] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78f407a8-e2ad-45f0-a66a-5f5019492dd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.921678] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Created directory with path [datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1829.921678] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45/OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45.vmdk to [datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4/509bfeca-5406-4a2d-b9c1-64cb54f16cd4.vmdk. 
{{(pid=63241) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1829.921678] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-30c4782a-498d-42bc-92fe-49397f5259e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.930239] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1829.930239] env[63241]: value = "task-1821056" [ 1829.930239] env[63241]: _type = "Task" [ 1829.930239] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.940490] env[63241]: DEBUG nova.compute.manager [req-042665dd-e622-4d82-9a45-b3909b009cd0 req-6f806794-86d9-4712-9f56-13b35e81fb77 service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Received event network-vif-plugged-240bd985-f430-47f9-83a8-287f0c345a36 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1829.940809] env[63241]: DEBUG oslo_concurrency.lockutils [req-042665dd-e622-4d82-9a45-b3909b009cd0 req-6f806794-86d9-4712-9f56-13b35e81fb77 service nova] Acquiring lock "6f3cbd23-30b9-4502-be07-2edd0a701291-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.941140] env[63241]: DEBUG oslo_concurrency.lockutils [req-042665dd-e622-4d82-9a45-b3909b009cd0 req-6f806794-86d9-4712-9f56-13b35e81fb77 service nova] Lock "6f3cbd23-30b9-4502-be07-2edd0a701291-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.941767] env[63241]: DEBUG oslo_concurrency.lockutils [req-042665dd-e622-4d82-9a45-b3909b009cd0 req-6f806794-86d9-4712-9f56-13b35e81fb77 service nova] Lock "6f3cbd23-30b9-4502-be07-2edd0a701291-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.942031] env[63241]: DEBUG nova.compute.manager [req-042665dd-e622-4d82-9a45-b3909b009cd0 req-6f806794-86d9-4712-9f56-13b35e81fb77 service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] No waiting events found dispatching network-vif-plugged-240bd985-f430-47f9-83a8-287f0c345a36 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1829.942272] env[63241]: WARNING nova.compute.manager [req-042665dd-e622-4d82-9a45-b3909b009cd0 req-6f806794-86d9-4712-9f56-13b35e81fb77 service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Received unexpected event network-vif-plugged-240bd985-f430-47f9-83a8-287f0c345a36 for instance with vm_state building and task_state spawning. [ 1829.948924] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821056, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.950629] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c629b877-bab6-4be5-9cc2-f53feda499c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.965519] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec31c43-f0fe-40b2-b259-7cbcc3450c05 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.979247] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb93e1c-6ea4-4ccf-9fa2-4d2ec7646ff4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.989385] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d119feec-b14d-4849-949c-6d6d081650f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.009060] env[63241]: DEBUG nova.compute.manager [req-31800c0b-f863-4184-8438-4aa3228597e7 req-61ffdf1b-f4e3-4b98-ba7d-824c97529c01 service nova] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Detach interface failed, port_id=93ca840a-01ca-4805-8371-2a7fee63b9ee, reason: Instance 6055a56d-1e0d-47bc-930b-b62206a0263e could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1830.034333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "refresh_cache-779d2380-be6c-4fdb-8755-10e99f8a6fd9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.034762] env[63241]: DEBUG nova.compute.manager [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Instance network_info: |[{"id": "b434fe5e-e77d-4974-8bd4-7226a359e28d", "address": "fa:16:3e:27:f8:85", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb434fe5e-e7", "ovs_interfaceid": "b434fe5e-e77d-4974-8bd4-7226a359e28d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 
1830.035339] env[63241]: DEBUG oslo_concurrency.lockutils [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] Acquired lock "refresh_cache-779d2380-be6c-4fdb-8755-10e99f8a6fd9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.035598] env[63241]: DEBUG nova.network.neutron [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Refreshing network info cache for port b434fe5e-e77d-4974-8bd4-7226a359e28d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1830.037330] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:f8:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b434fe5e-e77d-4974-8bd4-7226a359e28d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1830.045471] env[63241]: DEBUG oslo.service.loopingcall [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1830.046215] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5e58e8-da9f-4839-a8e8-ccb0829ec7e0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.049793] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1830.050033] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e30c12c-227d-4268-bf83-69630dfdd27c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.072293] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad6cf92-6334-4081-b24e-ad1c994ddcfd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.079884] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1830.079884] env[63241]: value = "task-1821057" [ 1830.079884] env[63241]: _type = "Task" [ 1830.079884] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.092328] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.096195] env[63241]: DEBUG nova.compute.provider_tree [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1830.098343] env[63241]: DEBUG nova.network.neutron [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Successfully created port: 0b88ea9e-8926-4fb6-a2f7-3ba89336c41d {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1830.105588] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821057, 'name': CreateVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.115867] env[63241]: DEBUG nova.compute.manager [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1830.197244] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-6b96988b-cc79-41d7-a17d-277ae5aeb4dc-55bc89c7-241d-48af-9915-9dd2f1afd2c0" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.197558] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-6b96988b-cc79-41d7-a17d-277ae5aeb4dc-55bc89c7-241d-48af-9915-9dd2f1afd2c0" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.198348] env[63241]: DEBUG nova.objects.instance [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'flavor' on Instance uuid 6b96988b-cc79-41d7-a17d-277ae5aeb4dc {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1830.351773] env[63241]: DEBUG nova.network.neutron [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Successfully updated port: 240bd985-f430-47f9-83a8-287f0c345a36 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1830.356671] env[63241]: INFO nova.compute.manager [-] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Took 1.30 seconds to deallocate network for instance. [ 1830.409772] env[63241]: DEBUG nova.compute.manager [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Received event network-changed-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1830.410417] env[63241]: DEBUG nova.compute.manager [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Refreshing instance network info cache due to event network-changed-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1830.410417] env[63241]: DEBUG oslo_concurrency.lockutils [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] Acquiring lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.410417] env[63241]: DEBUG oslo_concurrency.lockutils [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] Acquired lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.410634] env[63241]: DEBUG nova.network.neutron [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Refreshing network info cache for port fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1830.439804] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821056, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.595164] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821057, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.624842] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff91c19f-08c7-4039-900e-5e521eabd0a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.628670] env[63241]: ERROR nova.scheduler.client.report [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [req-db35b966-f51b-4e12-9bad-a78502ddf595] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-db35b966-f51b-4e12-9bad-a78502ddf595"}]} [ 1830.653946] env[63241]: DEBUG nova.scheduler.client.report [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1830.656424] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33d7cf4-c134-41de-afd1-cd406f22cd7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.668020] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance '49d350ff-4932-4759-a3fa-53274c484ae6' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1830.677217] env[63241]: DEBUG nova.scheduler.client.report [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1830.677483] env[63241]: DEBUG nova.compute.provider_tree [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1830.693429] env[63241]: DEBUG nova.scheduler.client.report [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1830.713870] env[63241]: DEBUG nova.scheduler.client.report [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] 
Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1830.868485] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "refresh_cache-6f3cbd23-30b9-4502-be07-2edd0a701291" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.868485] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "refresh_cache-6f3cbd23-30b9-4502-be07-2edd0a701291" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.869096] env[63241]: DEBUG nova.network.neutron [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1830.870414] env[63241]: DEBUG oslo_concurrency.lockutils [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.946743] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821056, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.964713] env[63241]: DEBUG nova.objects.instance [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'pci_requests' on Instance uuid 6b96988b-cc79-41d7-a17d-277ae5aeb4dc {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1830.976922] env[63241]: DEBUG nova.network.neutron [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Updated VIF entry in instance network info cache for port b434fe5e-e77d-4974-8bd4-7226a359e28d. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1830.977652] env[63241]: DEBUG nova.network.neutron [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Updating instance_info_cache with network_info: [{"id": "b434fe5e-e77d-4974-8bd4-7226a359e28d", "address": "fa:16:3e:27:f8:85", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb434fe5e-e7", "ovs_interfaceid": "b434fe5e-e77d-4974-8bd4-7226a359e28d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.045122] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668f279a-87b0-4010-b0cf-4b622e3c8ed3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.060376] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0cf5e6-66fc-4c9a-b596-b1cff39d994f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.104077] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63dcc27-3132-4ad9-8802-ea93c7f6cf55 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.113659] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e21610-d375-448e-8447-b395b3fdb70f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.121072] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821057, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.133314] env[63241]: DEBUG nova.compute.manager [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1831.136094] env[63241]: DEBUG nova.compute.provider_tree [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1831.164284] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1831.164565] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1831.164784] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1831.164985] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1831.165158] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1831.165307] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1831.165522] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1831.165683] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1831.165850] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1831.166150] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1831.166240] env[63241]: DEBUG nova.virt.hardware [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1831.167438] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdabcb88-4246-432a-a3cc-9293bba757e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.178301] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fdf211-a861-42f0-9e80-527aba14ad26 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.185975] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1831.186602] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc8f725e-6f4e-408f-af3a-8a71ad4d1bcc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.202512] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1831.202512] env[63241]: value = "task-1821058" [ 1831.202512] env[63241]: _type = "Task" [ 1831.202512] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.213879] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821058, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.224994] env[63241]: DEBUG nova.network.neutron [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updated VIF entry in instance network info cache for port fbeb829e-4c31-429b-bdb0-ecb7331ef4ea. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1831.225485] env[63241]: DEBUG nova.network.neutron [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating instance_info_cache with network_info: [{"id": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "address": "fa:16:3e:17:1d:18", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbeb829e-4c", "ovs_interfaceid": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.403641] env[63241]: DEBUG nova.network.neutron [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1831.442259] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821056, 'name': MoveVirtualDisk_Task} progress is 52%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.478933] env[63241]: DEBUG nova.objects.base [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Object Instance<6b96988b-cc79-41d7-a17d-277ae5aeb4dc> lazy-loaded attributes: flavor,pci_requests {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1831.479166] env[63241]: DEBUG nova.network.neutron [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1831.483106] env[63241]: DEBUG oslo_concurrency.lockutils [req-c051cde3-5d20-42c3-bcec-c90b5faca6e9 req-079d3cf9-5ca1-400d-bd3a-8118046c75ac service nova] Releasing lock "refresh_cache-779d2380-be6c-4fdb-8755-10e99f8a6fd9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.558321] env[63241]: DEBUG nova.network.neutron [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Updating instance_info_cache with network_info: [{"id": "240bd985-f430-47f9-83a8-287f0c345a36", "address": "fa:16:3e:0c:cc:32", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap240bd985-f4", "ovs_interfaceid": "240bd985-f430-47f9-83a8-287f0c345a36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.611889] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821057, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.618542] env[63241]: DEBUG nova.policy [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54dc853b6f204a75ae7612f9fbd2d1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecafb0abbdc74501b22b20b797c4c60c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1831.676773] env[63241]: DEBUG nova.scheduler.client.report [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 152 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1831.677132] env[63241]: DEBUG nova.compute.provider_tree [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 152 to 153 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1831.677348] env[63241]: DEBUG nova.compute.provider_tree [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1831.714919] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821058, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.728182] env[63241]: DEBUG oslo_concurrency.lockutils [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] Releasing lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.728471] env[63241]: DEBUG nova.compute.manager [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Received event network-changed-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1831.728653] env[63241]: DEBUG nova.compute.manager [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Refreshing instance network info cache due to event network-changed-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1831.728883] env[63241]: DEBUG oslo_concurrency.lockutils [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] Acquiring lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.729087] env[63241]: DEBUG oslo_concurrency.lockutils [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] Acquired lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.729355] env[63241]: DEBUG nova.network.neutron [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Refreshing network info cache for port fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1831.943163] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821056, 'name': MoveVirtualDisk_Task} progress is 71%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.060951] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "refresh_cache-6f3cbd23-30b9-4502-be07-2edd0a701291" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.061116] env[63241]: DEBUG nova.compute.manager [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Instance network_info: |[{"id": "240bd985-f430-47f9-83a8-287f0c345a36", "address": "fa:16:3e:0c:cc:32", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap240bd985-f4", "ovs_interfaceid": "240bd985-f430-47f9-83a8-287f0c345a36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1832.061574] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:cc:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '240bd985-f430-47f9-83a8-287f0c345a36', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1832.070116] env[63241]: DEBUG oslo.service.loopingcall [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1832.070421] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1832.070656] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52376a07-7b82-47c4-987a-d8b395e9aebb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.096247] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1832.096247] env[63241]: value = "task-1821059" [ 1832.096247] env[63241]: _type = "Task" [ 1832.096247] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.109753] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821057, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.113312] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821059, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.162303] env[63241]: DEBUG nova.network.neutron [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Successfully updated port: 0b88ea9e-8926-4fb6-a2f7-3ba89336c41d {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1832.184749] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.081s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.189469] env[63241]: DEBUG oslo_concurrency.lockutils [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.318s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.189469] env[63241]: DEBUG nova.objects.instance [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lazy-loading 'resources' on Instance uuid 6055a56d-1e0d-47bc-930b-b62206a0263e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1832.217482] env[63241]: INFO nova.scheduler.client.report [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Deleted allocations for instance 12b99b2b-56f0-4ce9-8897-f429c2084f38 [ 1832.223293] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821058, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.442384] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821056, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.581606] env[63241]: DEBUG nova.network.neutron [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updated VIF entry in instance network info cache for port fbeb829e-4c31-429b-bdb0-ecb7331ef4ea. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1832.582096] env[63241]: DEBUG nova.network.neutron [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating instance_info_cache with network_info: [{"id": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "address": "fa:16:3e:17:1d:18", "network": {"id": "a5587086-6eea-482f-a50b-ec3bf24a190a", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2104290451-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "854490ce445a413d85901cfe6b091346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbeb829e-4c", "ovs_interfaceid": "fbeb829e-4c31-429b-bdb0-ecb7331ef4ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.611024] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821059, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.614212] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821057, 'name': CreateVM_Task, 'duration_secs': 2.253783} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.614385] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1832.615247] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.615358] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.615651] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1832.615916] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26c24d09-8a2f-49eb-8f23-bbfd788be2f4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.621236] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1832.621236] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52f4f9de-7bca-98dd-5ef9-ee27b69ce426" [ 1832.621236] env[63241]: _type = "Task" [ 1832.621236] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.629987] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f4f9de-7bca-98dd-5ef9-ee27b69ce426, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.665760] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "refresh_cache-426b001f-949f-4814-9c10-c7f44b6da44a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.665941] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquired lock "refresh_cache-426b001f-949f-4814-9c10-c7f44b6da44a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.666103] env[63241]: DEBUG nova.network.neutron [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1832.716649] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821058, 'name': PowerOnVM_Task} progress is 96%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.730434] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6b630eac-da6e-46c7-b555-dc26814c6e7b tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "12b99b2b-56f0-4ce9-8897-f429c2084f38" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.211s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.782187] env[63241]: DEBUG nova.compute.manager [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Received event network-changed-240bd985-f430-47f9-83a8-287f0c345a36 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1832.782663] env[63241]: DEBUG nova.compute.manager [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Refreshing instance network info cache due to event network-changed-240bd985-f430-47f9-83a8-287f0c345a36. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1832.782895] env[63241]: DEBUG oslo_concurrency.lockutils [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] Acquiring lock "refresh_cache-6f3cbd23-30b9-4502-be07-2edd0a701291" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.783057] env[63241]: DEBUG oslo_concurrency.lockutils [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] Acquired lock "refresh_cache-6f3cbd23-30b9-4502-be07-2edd0a701291" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.783233] env[63241]: DEBUG nova.network.neutron [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Refreshing network info cache for port 240bd985-f430-47f9-83a8-287f0c345a36 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1832.937309] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9272f4-ab35-479a-a818-d324e7483cce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.947772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7ab49b-c174-43b8-be72-5f5abd59179a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.950933] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821056, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.718131} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.951213] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45/OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45.vmdk to [datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4/509bfeca-5406-4a2d-b9c1-64cb54f16cd4.vmdk. 
[ 1832.951388] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Cleaning up location [datastore1] OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1832.951555] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_9bc46550-428f-4799-9614-2b416c6bbc45 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1832.952105] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3a00c5b-71a5-4464-947b-256b8bd63a4f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.984095] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e7cd9d-605a-4726-82c1-0f606f555b95 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.986448] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1832.986448] env[63241]: value = "task-1821060" [ 1832.986448] env[63241]: _type = "Task" [ 1832.986448] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.993399] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cac4d2-aec1-44e7-9eb9-b32301db5ff7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.000290] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.011076] env[63241]: DEBUG nova.compute.provider_tree [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1833.085317] env[63241]: DEBUG oslo_concurrency.lockutils [req-cff4d362-ee2b-4046-82bd-7af10b6a50f1 req-751cde80-4428-4808-8f2b-d77ce05f9305 service nova] Releasing lock "refresh_cache-1e172f73-972e-4401-b358-512f7e03b27f" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.108126] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821059, 'name': CreateVM_Task, 'duration_secs': 0.732863} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.108341] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1833.108983] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.131880] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52f4f9de-7bca-98dd-5ef9-ee27b69ce426, 'name': SearchDatastore_Task, 'duration_secs': 0.011416} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.132197] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.132482] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1833.132740] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.132891] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.133083] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1833.133364] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.133669] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1833.133891] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33907cc6-a58c-4f04-9f2a-9599c01aaf2f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.135874] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e626e8b-bad4-418a-ae97-3cd8ffd377d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.141722] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1833.141722] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5264aef8-935e-008e-c917-b53c52b85775" [ 1833.141722] env[63241]: _type = "Task" [ 1833.141722] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.145826] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1833.146054] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1833.147190] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d2288ce-9ae0-4bba-b103-47955abd8882 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.152763] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5264aef8-935e-008e-c917-b53c52b85775, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.156157] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1833.156157] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b45401-296c-161a-d419-af59c5fc28f6" [ 1833.156157] env[63241]: _type = "Task" [ 1833.156157] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.163742] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b45401-296c-161a-d419-af59c5fc28f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.201819] env[63241]: DEBUG nova.network.neutron [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1833.214296] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821058, 'name': PowerOnVM_Task} progress is 96%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.491625] env[63241]: DEBUG nova.network.neutron [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Updated VIF entry in instance network info cache for port 240bd985-f430-47f9-83a8-287f0c345a36. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1833.491941] env[63241]: DEBUG nova.network.neutron [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Updating instance_info_cache with network_info: [{"id": "240bd985-f430-47f9-83a8-287f0c345a36", "address": "fa:16:3e:0c:cc:32", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap240bd985-f4", "ovs_interfaceid": "240bd985-f430-47f9-83a8-287f0c345a36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.498873] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.042594} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.499167] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1833.499374] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4/509bfeca-5406-4a2d-b9c1-64cb54f16cd4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.499627] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4/509bfeca-5406-4a2d-b9c1-64cb54f16cd4.vmdk to [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577/01af6dc5-e0e7-4f8b-ad07-73af80c32577.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1833.499908] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-048e6254-56cf-44d5-bf75-322b3d2dcae9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.508124] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1833.508124] env[63241]: value = "task-1821061" [ 1833.508124] env[63241]: _type = "Task" [ 1833.508124] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.512476] env[63241]: DEBUG nova.network.neutron [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Updating instance_info_cache with network_info: [{"id": "0b88ea9e-8926-4fb6-a2f7-3ba89336c41d", "address": "fa:16:3e:14:3d:08", "network": {"id": "da181c86-2cd2-4b0b-bf7c-0c2bdbb63796", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-51851294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08e0c8d883004d0fb18507be072eb781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b88ea9e-89", "ovs_interfaceid": "0b88ea9e-8926-4fb6-a2f7-3ba89336c41d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.514889] env[63241]: DEBUG nova.scheduler.client.report [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1833.523643] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821061, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.653433] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5264aef8-935e-008e-c917-b53c52b85775, 'name': SearchDatastore_Task, 'duration_secs': 0.011428} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.653794] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.654047] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1833.654274] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.670794] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b45401-296c-161a-d419-af59c5fc28f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009914} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.671725] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb4f6b6e-ba5f-4320-91b3-61bdc3f26f4b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.679316] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1833.679316] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524d17f0-b372-97e6-d3c9-7b1c1789b2cc" [ 1833.679316] env[63241]: _type = "Task" [ 1833.679316] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.689971] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524d17f0-b372-97e6-d3c9-7b1c1789b2cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.721946] env[63241]: DEBUG oslo_vmware.api [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821058, 'name': PowerOnVM_Task, 'duration_secs': 2.220217} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.722594] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1833.723240] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1787d14-c395-4ac3-af86-a2458d526340 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance '49d350ff-4932-4759-a3fa-53274c484ae6' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1833.934469] env[63241]: DEBUG nova.network.neutron [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Successfully updated port: 55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1833.995173] env[63241]: DEBUG oslo_concurrency.lockutils [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] Releasing lock "refresh_cache-6f3cbd23-30b9-4502-be07-2edd0a701291" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.995302] env[63241]: DEBUG nova.compute.manager [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Received event network-vif-plugged-0b88ea9e-8926-4fb6-a2f7-3ba89336c41d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1833.995551] env[63241]: DEBUG oslo_concurrency.lockutils [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] Acquiring lock "426b001f-949f-4814-9c10-c7f44b6da44a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.995771] env[63241]: DEBUG oslo_concurrency.lockutils [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] Lock "426b001f-949f-4814-9c10-c7f44b6da44a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.995938] env[63241]: DEBUG oslo_concurrency.lockutils [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] Lock "426b001f-949f-4814-9c10-c7f44b6da44a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.996126] env[63241]: DEBUG nova.compute.manager [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] No waiting events found dispatching network-vif-plugged-0b88ea9e-8926-4fb6-a2f7-3ba89336c41d {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
1833.996311] env[63241]: WARNING nova.compute.manager [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Received unexpected event network-vif-plugged-0b88ea9e-8926-4fb6-a2f7-3ba89336c41d for instance with vm_state building and task_state spawning. [ 1833.996525] env[63241]: DEBUG nova.compute.manager [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Received event network-changed-0b88ea9e-8926-4fb6-a2f7-3ba89336c41d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1833.996746] env[63241]: DEBUG nova.compute.manager [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Refreshing instance network info cache due to event network-changed-0b88ea9e-8926-4fb6-a2f7-3ba89336c41d. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1833.996835] env[63241]: DEBUG oslo_concurrency.lockutils [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] Acquiring lock "refresh_cache-426b001f-949f-4814-9c10-c7f44b6da44a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.018913] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Releasing lock "refresh_cache-426b001f-949f-4814-9c10-c7f44b6da44a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.019370] env[63241]: DEBUG nova.compute.manager [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Instance network_info: |[{"id": "0b88ea9e-8926-4fb6-a2f7-3ba89336c41d", "address": "fa:16:3e:14:3d:08", "network": {"id": "da181c86-2cd2-4b0b-bf7c-0c2bdbb63796", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-51851294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08e0c8d883004d0fb18507be072eb781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b88ea9e-89", "ovs_interfaceid": "0b88ea9e-8926-4fb6-a2f7-3ba89336c41d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1834.020039] env[63241]: DEBUG oslo_concurrency.lockutils [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.025826] env[63241]: DEBUG oslo_concurrency.lockutils [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] Acquired lock "refresh_cache-426b001f-949f-4814-9c10-c7f44b6da44a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.026314] env[63241]: DEBUG nova.network.neutron [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Refreshing network info cache for port 0b88ea9e-8926-4fb6-a2f7-3ba89336c41d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1834.027409] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:3d:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39ab9baf-90cd-4fe2-8d56-434f8210fc19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b88ea9e-8926-4fb6-a2f7-3ba89336c41d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1834.039371] env[63241]: DEBUG oslo.service.loopingcall [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1834.039662] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821061, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.043917] env[63241]: INFO nova.scheduler.client.report [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted allocations for instance 6055a56d-1e0d-47bc-930b-b62206a0263e [ 1834.044908] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1834.048054] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12f2539e-d182-4958-9e76-60d0f37471e7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.071508] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1834.071508] env[63241]: value = "task-1821062" [ 1834.071508] env[63241]: _type = "Task" [ 1834.071508] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.081254] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821062, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.195176] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524d17f0-b372-97e6-d3c9-7b1c1789b2cc, 'name': SearchDatastore_Task, 'duration_secs': 0.063535} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.195558] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.195849] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9/779d2380-be6c-4fdb-8755-10e99f8a6fd9.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1834.196178] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.196379] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1834.196673] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3ecc428-86b0-4c20-9e7b-63f9e2f5a8a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.199853] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c23116dd-cd67-4cd6-9cf6-3132dcc1e0ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.211057] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1834.211057] env[63241]: value = "task-1821063" [ 1834.211057] env[63241]: _type = "Task" [ 1834.211057] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.222749] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.224527] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1834.224804] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1834.225765] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddc263de-eec1-40ff-809f-5d9e26e058fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.236235] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1834.236235] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5210d7e8-4944-8664-999f-30c8388afd05" [ 1834.236235] env[63241]: _type = "Task" [ 1834.236235] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.251075] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5210d7e8-4944-8664-999f-30c8388afd05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.287907] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.288366] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.437008] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.437350] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.437553] env[63241]: DEBUG nova.network.neutron [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1834.440834] env[63241]: DEBUG nova.network.neutron [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Updated VIF entry in instance network info cache for port 0b88ea9e-8926-4fb6-a2f7-3ba89336c41d. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1834.441201] env[63241]: DEBUG nova.network.neutron [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Updating instance_info_cache with network_info: [{"id": "0b88ea9e-8926-4fb6-a2f7-3ba89336c41d", "address": "fa:16:3e:14:3d:08", "network": {"id": "da181c86-2cd2-4b0b-bf7c-0c2bdbb63796", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-51851294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08e0c8d883004d0fb18507be072eb781", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39ab9baf-90cd-4fe2-8d56-434f8210fc19", "external-id": "nsx-vlan-transportzone-713", "segmentation_id": 713, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b88ea9e-89", "ovs_interfaceid": "0b88ea9e-8926-4fb6-a2f7-3ba89336c41d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.520349] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821061, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.568596] env[63241]: DEBUG oslo_concurrency.lockutils [None req-33e22912-7f7e-4c40-ac6b-5762e2a5b445 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "6055a56d-1e0d-47bc-930b-b62206a0263e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.346s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.584540] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821062, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.724474] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.752994] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5210d7e8-4944-8664-999f-30c8388afd05, 'name': SearchDatastore_Task, 'duration_secs': 0.069039} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.753937] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a181ce9e-e31b-4a56-92c5-d402db07eb2e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.762221] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1834.762221] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529e5481-52f5-1a81-4fe2-151e8a63a6d0" [ 1834.762221] env[63241]: _type = "Task" [ 1834.762221] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.772355] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529e5481-52f5-1a81-4fe2-151e8a63a6d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.792514] env[63241]: DEBUG nova.compute.manager [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1834.926129] env[63241]: DEBUG nova.compute.manager [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received event network-vif-plugged-55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1834.926370] env[63241]: DEBUG oslo_concurrency.lockutils [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] Acquiring lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.926585] env[63241]: DEBUG oslo_concurrency.lockutils [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.926747] env[63241]: DEBUG oslo_concurrency.lockutils [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.926922] env[63241]: DEBUG nova.compute.manager [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] No waiting events found dispatching 
network-vif-plugged-55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1834.927100] env[63241]: WARNING nova.compute.manager [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received unexpected event network-vif-plugged-55bc89c7-241d-48af-9915-9dd2f1afd2c0 for instance with vm_state active and task_state None. [ 1834.927295] env[63241]: DEBUG nova.compute.manager [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received event network-changed-55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1834.927508] env[63241]: DEBUG nova.compute.manager [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing instance network info cache due to event network-changed-55bc89c7-241d-48af-9915-9dd2f1afd2c0. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1834.927641] env[63241]: DEBUG oslo_concurrency.lockutils [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] Acquiring lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.943391] env[63241]: DEBUG oslo_concurrency.lockutils [req-293e725b-d8aa-47ad-8d0f-2143b3d8c09d req-82b9669b-57f6-403f-aa2e-0cc738059958 service nova] Releasing lock "refresh_cache-426b001f-949f-4814-9c10-c7f44b6da44a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.982481] env[63241]: WARNING nova.network.neutron [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] cafd3d43-975a-4836-8948-2f1b47e56666 already exists in list: networks containing: ['cafd3d43-975a-4836-8948-2f1b47e56666']. ignoring it [ 1835.022093] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821061, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.086643] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821062, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.224630] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821063, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.274137] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529e5481-52f5-1a81-4fe2-151e8a63a6d0, 'name': SearchDatastore_Task, 'duration_secs': 0.095117} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.277764] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.278114] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 6f3cbd23-30b9-4502-be07-2edd0a701291/6f3cbd23-30b9-4502-be07-2edd0a701291.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1835.278426] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0683346-6a43-412d-8ecf-ed7d93762b57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.288811] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1835.288811] env[63241]: value = "task-1821064" [ 1835.288811] env[63241]: _type = "Task" [ 1835.288811] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.303806] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821064, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.322837] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.323130] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.324824] env[63241]: INFO nova.compute.claims [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1835.493071] env[63241]: DEBUG nova.network.neutron [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55bc89c7-241d-48af-9915-9dd2f1afd2c0", "address": "fa:16:3e:75:b6:69", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55bc89c7-24", "ovs_interfaceid": "55bc89c7-241d-48af-9915-9dd2f1afd2c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.522730] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821061, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.585040] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821062, 'name': CreateVM_Task, 'duration_secs': 1.088102} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.585278] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1835.585967] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.586156] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.586557] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1835.586835] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9018252-e132-4d80-8cee-bd91fafb9e77 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.593526] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1835.593526] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ceda21-9264-6ce6-0feb-e100a98cf44a" [ 1835.593526] env[63241]: _type = "Task" [ 1835.593526] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.604294] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ceda21-9264-6ce6-0feb-e100a98cf44a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.723436] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.801075] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821064, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.997748] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.997748] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.997748] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.998123] env[63241]: DEBUG oslo_concurrency.lockutils [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] Acquired lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.998345] env[63241]: DEBUG nova.network.neutron [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing network info cache for port 55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1836.000908] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a4746b-97ed-43ee-a37e-1d772886e125 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.019652] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 
tempest-AttachInterfacesTestJSON-1054157255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1836.019927] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1836.020099] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1836.020293] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1836.020437] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1836.020808] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1836.020808] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1836.020947] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1836.021126] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1836.021292] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 
tempest-AttachInterfacesTestJSON-1054157255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1836.022053] env[63241]: DEBUG nova.virt.hardware [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1836.028076] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Reconfiguring VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1836.032320] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3da94f40-a933-459e-8254-2b867c1912d9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.052044] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821061, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.053988] env[63241]: DEBUG oslo_vmware.api [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1836.053988] env[63241]: value = "task-1821065" [ 1836.053988] env[63241]: _type = "Task" [ 1836.053988] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.063968] env[63241]: DEBUG oslo_vmware.api [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821065, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.106338] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ceda21-9264-6ce6-0feb-e100a98cf44a, 'name': SearchDatastore_Task, 'duration_secs': 0.09206} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.106736] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.107045] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1836.107311] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.107484] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.107678] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1836.107992] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-567073b2-2e82-4a0f-911a-6a34ee2be58c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.128660] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1836.128823] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1836.129666] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c66141e7-9518-4c67-aa72-b183856c22fc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.137599] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1836.137599] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527d5665-45cd-5e87-f17d-3c7989c06958" [ 1836.137599] env[63241]: _type = "Task" [ 1836.137599] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.147590] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527d5665-45cd-5e87-f17d-3c7989c06958, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.225780] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821063, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.307645] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821064, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.531271] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821061, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.569607] env[63241]: DEBUG oslo_vmware.api [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821065, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.614868] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e134593-954d-466d-bdba-79d0b44a6cef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.624835] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6deae7f0-5a5d-4207-8eda-df19b782e600 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.667102] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1153149c-86eb-4b56-92eb-10fee892e004 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.679030] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527d5665-45cd-5e87-f17d-3c7989c06958, 'name': SearchDatastore_Task, 'duration_secs': 0.095412} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.682327] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17ec30dd-a1d0-445e-b6e1-e7565be816ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.685989] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5126e6b3-d580-40aa-b280-fea162d5cabb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.696660] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1836.696660] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5212b62f-c48c-e88d-c87e-54d4366c5fbf" [ 1836.696660] env[63241]: _type = "Task" [ 1836.696660] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.707703] env[63241]: DEBUG nova.compute.provider_tree [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1836.730601] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5212b62f-c48c-e88d-c87e-54d4366c5fbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.735376] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821063, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.796120] env[63241]: DEBUG nova.network.neutron [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updated VIF entry in instance network info cache for port 55bc89c7-241d-48af-9915-9dd2f1afd2c0. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1836.796565] env[63241]: DEBUG nova.network.neutron [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55bc89c7-241d-48af-9915-9dd2f1afd2c0", "address": "fa:16:3e:75:b6:69", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55bc89c7-24", "ovs_interfaceid": "55bc89c7-241d-48af-9915-9dd2f1afd2c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] 
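The instance_info_cache payload above is a list of VIF dictionaries: each entry carries the Neutron port id, MAC address, and a nested network/subnets/ips structure holding fixed and floating addresses plus the OVS binding details. A short sketch of walking that structure, using a trimmed stand-in for the cached list:

```python
# Trimmed network_info-style structure (one VIF from the cache above) and a
# walk that lists fixed and floating IPs per port.
network_info = [
    {"id": "55bc89c7-241d-48af-9915-9dd2f1afd2c0",
     "address": "fa:16:3e:75:b6:69",
     "network": {"subnets": [
         {"cidr": "192.168.128.0/28",
          "ips": [{"address": "192.168.128.13", "floating_ips": []}]}]}},
]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floats = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["id"], vif["address"], ip["address"], floats or "-")
```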
{{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.802596] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "49d350ff-4932-4759-a3fa-53274c484ae6" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.804650] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.804650] env[63241]: DEBUG nova.compute.manager [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Going to confirm migration 5 {{(pid=63241) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1836.804650] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821064, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.031056] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821061, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.198842} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.031495] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/509bfeca-5406-4a2d-b9c1-64cb54f16cd4/509bfeca-5406-4a2d-b9c1-64cb54f16cd4.vmdk to [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577/01af6dc5-e0e7-4f8b-ad07-73af80c32577.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1837.032366] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497a681a-a9eb-4f44-a8be-b263c7f3e207 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.056571] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577/01af6dc5-e0e7-4f8b-ad07-73af80c32577.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1837.056980] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d6b065c-dfcd-4fcf-888d-20c451c0eb63 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.081121] env[63241]: DEBUG oslo_vmware.api [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821065, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.082762] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1837.082762] env[63241]: value = "task-1821066" [ 1837.082762] env[63241]: _type = "Task" [ 1837.082762] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.093173] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821066, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.212223] env[63241]: DEBUG nova.scheduler.client.report [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1837.223806] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5212b62f-c48c-e88d-c87e-54d4366c5fbf, 'name': SearchDatastore_Task, 'duration_secs': 0.054409} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.224418] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.224739] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 426b001f-949f-4814-9c10-c7f44b6da44a/426b001f-949f-4814-9c10-c7f44b6da44a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1837.224996] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2dd7590c-d169-4b2f-aaa6-299c1cce87a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.230253] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821063, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.627241} completed successfully. 
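The inventory record above lists, per resource class, the totals, reserved amounts, allocation ratios and unit limits the resource tracker reports to placement; the schedulable capacity is derived from those figures, roughly (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. A quick check against the values in the log:

```python
# Values copied from the inventory record above; capacity is computed as
# (total - reserved) * allocation_ratio for each resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 154},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    # max_unit caps any single allocation regardless of remaining capacity.
    print(rc, capacity, 'max per allocation:', inv['max_unit'])
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```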
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.230842] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9/779d2380-be6c-4fdb-8755-10e99f8a6fd9.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1837.231093] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1837.231574] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12bce5bb-0f58-47c8-998a-3d426998057a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.235438] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1837.235438] env[63241]: value = "task-1821067" [ 1837.235438] env[63241]: _type = "Task" [ 1837.235438] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.241283] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1837.241283] env[63241]: value = "task-1821068" [ 1837.241283] env[63241]: _type = "Task" [ 1837.241283] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.248199] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821067, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.254626] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821068, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.303702] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821064, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.854669} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.303976] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 6f3cbd23-30b9-4502-be07-2edd0a701291/6f3cbd23-30b9-4502-be07-2edd0a701291.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1837.304238] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1837.304522] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4aa74ce6-7218-49a1-9270-d7276040364b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.307097] env[63241]: DEBUG oslo_concurrency.lockutils [req-91092605-955a-4f8e-a83f-cb8dfae613ce req-1cf3832d-ef61-48e3-b9da-93ea5474d11b service nova] Releasing lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.322054] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1837.322054] env[63241]: value = "task-1821069" [ 1837.322054] env[63241]: _type = "Task" [ 1837.322054] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.341816] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821069, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.349170] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.349460] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquired lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.349699] env[63241]: DEBUG nova.network.neutron [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1837.349968] env[63241]: DEBUG nova.objects.instance [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'info_cache' on Instance uuid 49d350ff-4932-4759-a3fa-53274c484ae6 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1837.572880] env[63241]: DEBUG oslo_vmware.api [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821065, 'name': ReconfigVM_Task, 'duration_secs': 1.388686} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.573724] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.574093] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Reconfigured VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1837.595186] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821066, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.716893] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.717492] env[63241]: DEBUG nova.compute.manager [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1837.749652] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821067, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484286} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.750313] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 426b001f-949f-4814-9c10-c7f44b6da44a/426b001f-949f-4814-9c10-c7f44b6da44a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1837.750534] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1837.750787] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ac3ffc6-5a97-41a0-ac0b-c3d898a7f526 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.755257] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821068, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089181} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.756502] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1837.757201] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b37332-afe9-4d5f-9f57-1707f1237a8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.761432] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1837.761432] env[63241]: value = "task-1821070" [ 1837.761432] env[63241]: _type = "Task" [ 1837.761432] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.784323] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9/779d2380-be6c-4fdb-8755-10e99f8a6fd9.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1837.786093] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10304c64-9437-4510-b4e0-e58d9b0952ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.803586] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821070, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.810301] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1837.810301] env[63241]: value = "task-1821071" [ 1837.810301] env[63241]: _type = "Task" [ 1837.810301] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.820262] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821071, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.830783] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102117} completed successfully. 
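Taken together, the task stream for each instance being built above follows one sequence: copy the cached image VMDK into the instance directory (CopyVirtualDisk_Task), extend the root disk to the flavor size (ExtendVirtualDisk_Task), reconfigure the VM to attach that disk (ReconfigVM_Task), rename the VM (Rename_Task), and finally power it on (PowerOnVM_Task). A compressed sketch of that ordering follows; the step functions are placeholders that only record the call, not the driver's real vm_util/volumeops helpers.

```python
def _step(task_name):
    # Stand-in for submitting the named vCenter task and blocking on the
    # wait_for_task() loop; here it only prints the call order.
    def run(*args):
        print(task_name, *args)
    return run

copy_virtual_disk   = _step('CopyVirtualDisk_Task')
extend_virtual_disk = _step('ExtendVirtualDisk_Task')
attach_disk_to_vm   = _step('ReconfigVM_Task (attach disk)')
rename_vm           = _step('Rename_Task')
power_on_vm         = _step('PowerOnVM_Task')

def build_root_disk_and_boot(vm_ref, cached_vmdk, instance_vmdk, root_size_kb):
    copy_virtual_disk(cached_vmdk, instance_vmdk)     # from devstack-image-cache_base
    extend_virtual_disk(instance_vmdk, root_size_kb)  # grow root disk to flavor size
    attach_disk_to_vm(vm_ref, instance_vmdk)          # reconfigure VM with the new disk
    rename_vm(vm_ref)                                 # final VM name
    power_on_vm(vm_ref)                               # boot the instance
```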
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.831060] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1837.831945] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbe33bd-2557-40dc-bb5e-3fbbb65bd7d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.855285] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 6f3cbd23-30b9-4502-be07-2edd0a701291/6f3cbd23-30b9-4502-be07-2edd0a701291.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1837.857846] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d0d75cd-e143-41c6-8585-8f022a392fa0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.881288] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1837.881288] env[63241]: value = "task-1821072" [ 1837.881288] env[63241]: _type = "Task" [ 1837.881288] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.892352] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821072, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.080115] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6642f440-9ce6-44fe-bfe2-e0a5c5a5fce6 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-6b96988b-cc79-41d7-a17d-277ae5aeb4dc-55bc89c7-241d-48af-9915-9dd2f1afd2c0" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 7.882s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.088435] env[63241]: DEBUG nova.network.neutron [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance_info_cache with network_info: [{"id": "1c88bbab-4bd7-4ea5-858c-317020381bac", "address": "fa:16:3e:b5:7a:04", "network": {"id": "1866f3ce-4433-40fd-8dcc-0a18ba96325a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1084294295-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c64d07a686b414f93ec4c599307498f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dacd109c-2442-41b8-b612-7ed3efbdaa94", "external-id": "nsx-vlan-transportzone-940", "segmentation_id": 940, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c88bbab-4b", "ovs_interfaceid": "1c88bbab-4bd7-4ea5-858c-317020381bac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1838.099409] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821066, 'name': ReconfigVM_Task, 'duration_secs': 0.594362} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.099738] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577/01af6dc5-e0e7-4f8b-ad07-73af80c32577.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1838.101172] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'boot_index': 0, 'encrypted': False, 'guest_format': None, 'disk_bus': None, 'encryption_format': None, 'encryption_options': None, 'encryption_secret_uuid': None, 'device_type': 'disk', 'size': 0, 'image_id': 'e128f8d9-813d-4846-9a6e-b4c4717cd5b4'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377187', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'name': 'volume-9d900cbe-b561-4b8b-b228-5471bffb1998', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '01af6dc5-e0e7-4f8b-ad07-73af80c32577', 'attached_at': '', 'detached_at': '', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'serial': '9d900cbe-b561-4b8b-b228-5471bffb1998'}, 'boot_index': None, 'attachment_id': '195e56e2-8026-4d35-a787-5b92f71b13de', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sdb', 'device_type': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=63241) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1838.101377] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1838.101569] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377187', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'name': 'volume-9d900cbe-b561-4b8b-b228-5471bffb1998', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '01af6dc5-e0e7-4f8b-ad07-73af80c32577', 'attached_at': '', 'detached_at': '', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'serial': '9d900cbe-b561-4b8b-b228-5471bffb1998'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1838.102668] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68eeab2d-d24f-4e66-bcef-4a5417f49671 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.119772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4f5515-d282-407f-b889-6114407f04cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.145106] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] volume-9d900cbe-b561-4b8b-b228-5471bffb1998/volume-9d900cbe-b561-4b8b-b228-5471bffb1998.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1838.145466] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9023fac8-ce05-4167-aee8-94630d845720 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.163856] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1838.163856] env[63241]: value = "task-1821073" [ 1838.163856] env[63241]: _type = "Task" [ 1838.163856] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.176524] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821073, 'name': ReconfigVM_Task} progress is 6%. 
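The _attach_volume_vmdk payload above is the Cinder connection_info for a vmdk-backed volume: the shadow VM moref ('volume': 'vm-377187'), the volume id and name, the access mode, and the attachment metadata. A small sketch of pulling out the fields the attach path cares about, using a trimmed copy of that dictionary:

```python
# Trimmed copy of the connection_info seen above. The driver locates the
# volume's backing VMDK via the shadow VM moref and attaches it to the
# instance with the requested access mode.
connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-377187',
        'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998',
        'name': 'volume-9d900cbe-b561-4b8b-b228-5471bffb1998',
        'access_mode': 'rw',
        'encrypted': False,
    },
}

assert connection_info['driver_volume_type'] == 'vmdk'
data = connection_info['data']
read_only = data['access_mode'] != 'rw'
print(data['volume'], data['volume_id'], 'read_only=%s' % read_only)
```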
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.223142] env[63241]: DEBUG nova.compute.utils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1838.224567] env[63241]: DEBUG nova.compute.manager [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1838.224747] env[63241]: DEBUG nova.network.neutron [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1838.267375] env[63241]: DEBUG nova.policy [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8f8e170296b46d6a108092608492772', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e139fb67702e42d8a8b2401cc6be9303', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1838.272218] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821070, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063418} completed successfully. 
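The nova.policy line above shows the network:attach_external_network rule being evaluated against the request credentials; a token carrying only the reader/member roles fails the admin-oriented default, so the port is created without external-network privileges. Below is a minimal sketch of that kind of check with oslo.policy; the rule string and setup are simplified for illustration and are not Nova's actual policy wiring.

```python
from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
CONF([], project='example')   # initialize config so the enforcer can read its options

enforcer = policy.Enforcer(CONF)
# Illustrative default: restrict the rule to the admin role.
enforcer.register_defaults([
    policy.RuleDefault('network:attach_external_network', 'role:admin'),
])

creds = {'roles': ['reader', 'member'],
         'project_id': 'e139fb67702e42d8a8b2401cc6be9303'}
target = {'project_id': creds['project_id']}

allowed = enforcer.enforce('network:attach_external_network', target, creds)
print(allowed)   # False for a plain member token, matching the failed check above
```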
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.272480] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1838.273273] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3e6c04-9244-4f24-9cfa-ecc89eb4263c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.297508] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 426b001f-949f-4814-9c10-c7f44b6da44a/426b001f-949f-4814-9c10-c7f44b6da44a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1838.297841] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b1f2245-bff2-4642-96d5-e282adedcbdb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.322129] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821071, 'name': ReconfigVM_Task, 'duration_secs': 0.287127} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.323356] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9/779d2380-be6c-4fdb-8755-10e99f8a6fd9.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1838.324071] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1838.324071] env[63241]: value = "task-1821074" [ 1838.324071] env[63241]: _type = "Task" [ 1838.324071] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.324265] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d58e0590-a3e3-4218-aaea-af61748cabc7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.346802] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821074, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.348551] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1838.348551] env[63241]: value = "task-1821075" [ 1838.348551] env[63241]: _type = "Task" [ 1838.348551] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.362683] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821075, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.393577] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821072, 'name': ReconfigVM_Task, 'duration_secs': 0.275442} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.393863] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 6f3cbd23-30b9-4502-be07-2edd0a701291/6f3cbd23-30b9-4502-be07-2edd0a701291.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1838.394506] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d805dbd-6971-4a52-8dfc-5774c875c1c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.402535] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1838.402535] env[63241]: value = "task-1821076" [ 1838.402535] env[63241]: _type = "Task" [ 1838.402535] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.418910] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821076, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.531366] env[63241]: DEBUG nova.network.neutron [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Successfully created port: 958d4582-a277-49ee-b43b-f1f0b83217d0 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1838.591467] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Releasing lock "refresh_cache-49d350ff-4932-4759-a3fa-53274c484ae6" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.591882] env[63241]: DEBUG nova.objects.instance [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lazy-loading 'migration_context' on Instance uuid 49d350ff-4932-4759-a3fa-53274c484ae6 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1838.676447] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821073, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.727796] env[63241]: DEBUG nova.compute.manager [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1838.837457] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821074, 'name': ReconfigVM_Task, 'duration_secs': 0.33419} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.837795] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 426b001f-949f-4814-9c10-c7f44b6da44a/426b001f-949f-4814-9c10-c7f44b6da44a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1838.838414] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a081778-6944-4c5a-b6b5-677afb4c5345 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.846508] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1838.846508] env[63241]: value = "task-1821077" [ 1838.846508] env[63241]: _type = "Task" [ 1838.846508] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.858130] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821077, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.861283] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821075, 'name': Rename_Task, 'duration_secs': 0.160445} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.861547] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1838.861789] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a110d59-178f-43be-b4cf-f0bd7327cae6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.869127] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1838.869127] env[63241]: value = "task-1821078" [ 1838.869127] env[63241]: _type = "Task" [ 1838.869127] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.877580] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821078, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.912902] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821076, 'name': Rename_Task, 'duration_secs': 0.173708} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.913218] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1838.913533] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53bc0408-1dc1-4cd4-8398-66b3d74f3615 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.920814] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1838.920814] env[63241]: value = "task-1821079" [ 1838.920814] env[63241]: _type = "Task" [ 1838.920814] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.929469] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821079, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.095017] env[63241]: DEBUG nova.objects.base [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Object Instance<49d350ff-4932-4759-a3fa-53274c484ae6> lazy-loaded attributes: info_cache,migration_context {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1839.095860] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fba2d0c-19ff-4026-ab4b-d1fbc21dbe3d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.117780] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b35673be-a2d7-4f09-8e0c-3d86a6dd0abc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.126028] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1839.126028] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d995aa-86b9-2f49-b391-b3e253848e28" [ 1839.126028] env[63241]: _type = "Task" [ 1839.126028] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.135791] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d995aa-86b9-2f49-b391-b3e253848e28, 'name': SearchDatastore_Task, 'duration_secs': 0.008738} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.136415] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.136415] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.175297] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821073, 'name': ReconfigVM_Task, 'duration_secs': 0.928207} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.175591] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Reconfigured VM instance instance-00000056 to attach disk [datastore1] volume-9d900cbe-b561-4b8b-b228-5471bffb1998/volume-9d900cbe-b561-4b8b-b228-5471bffb1998.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1839.180744] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa7ba4b8-6ec9-443e-9568-481f62c66b7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.196282] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1839.196282] env[63241]: value = "task-1821080" [ 1839.196282] env[63241]: _type = "Task" [ 1839.196282] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.204756] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821080, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.359410] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821077, 'name': Rename_Task, 'duration_secs': 0.193997} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.359862] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1839.360124] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa7cde89-b728-4cf4-bdd0-73beafe52159 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.369929] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1839.369929] env[63241]: value = "task-1821081" [ 1839.369929] env[63241]: _type = "Task" [ 1839.369929] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.383629] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821078, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.387201] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821081, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.432067] env[63241]: DEBUG oslo_vmware.api [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821079, 'name': PowerOnVM_Task, 'duration_secs': 0.503605} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.432293] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1839.432503] env[63241]: INFO nova.compute.manager [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Took 11.64 seconds to spawn the instance on the hypervisor. 
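The Rename_Task / PowerOnVM_Task records above ("progress is N%.", then "completed successfully.") come from a poll-until-done loop. A minimal illustrative sketch of that pattern follows; it is not oslo.vmware's implementation, and the poll() callable and TaskInfo type are stand-ins introduced here for illustration.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str              # 'running' | 'success' | 'error'
    progress: int = 0
    result: object = None
    error: str = ''

def wait_for_task(poll, interval=0.5):
    """poll() returns the latest TaskInfo for one task reference (hypothetical helper)."""
    while True:
        info = poll()
        if info.state == 'running':
            # corresponds to the "... progress is N%." DEBUG records
            print('progress is %d%%.' % info.progress)
        elif info.state == 'success':
            # corresponds to the "... completed successfully." records
            print('completed successfully.')
            return info.result
        else:
            raise RuntimeError(info.error)
        time.sleep(interval)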
[ 1839.432698] env[63241]: DEBUG nova.compute.manager [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1839.433516] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86274bb4-f7c8-4a86-b1fd-05ed15fc2a2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.709637] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821080, 'name': ReconfigVM_Task, 'duration_secs': 0.227229} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.709637] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377187', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'name': 'volume-9d900cbe-b561-4b8b-b228-5471bffb1998', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '01af6dc5-e0e7-4f8b-ad07-73af80c32577', 'attached_at': '', 'detached_at': '', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'serial': '9d900cbe-b561-4b8b-b228-5471bffb1998'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1839.710180] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24e71da6-1a22-4b6f-9353-ea4756a74dd3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.717414] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1839.717414] env[63241]: value = "task-1821082" [ 1839.717414] env[63241]: _type = "Task" [ 1839.717414] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.727958] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821082, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.741716] env[63241]: DEBUG nova.compute.manager [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1839.784239] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1839.784523] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1839.785289] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1839.785289] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1839.785289] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1839.785289] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1839.786093] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1839.786093] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1839.786325] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1839.786539] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1839.786739] env[63241]: DEBUG nova.virt.hardware [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1839.787730] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb5a87e-a33e-4b2d-8a90-90dbe98d8dde {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.797975] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c43c77f-ab9b-4d3e-a924-5a5b7a179283 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.893974] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821081, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.901332] env[63241]: DEBUG oslo_vmware.api [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821078, 'name': PowerOnVM_Task, 'duration_secs': 0.641521} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.902078] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1839.903028] env[63241]: INFO nova.compute.manager [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Took 14.54 seconds to spawn the instance on the hypervisor. 
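The hardware records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "[VirtCPUTopology(cores=1,sockets=1,threads=1)]") describe enumerating every sockets/cores/threads split of the requested vCPU count within the per-dimension maxima. A simplified sketch of that kind of enumeration, under the assumption that any exact factorization within the limits is acceptable (this is not Nova's actual _get_possible_cpu_topologies):

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Every (sockets, cores, threads) split whose product equals the
    # requested vCPU count and that stays within the per-dimension maxima.
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(max_cores, vcpus // sockets) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# The request above: 1 vCPU with limits 65536:65536:65536
print(possible_topologies(1, 65536, 65536, 65536))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]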
[ 1839.903028] env[63241]: DEBUG nova.compute.manager [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1839.904137] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee39db1-bd82-4d10-9e88-b21af0708d96 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.955137] env[63241]: INFO nova.compute.manager [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Took 20.46 seconds to build instance. [ 1840.049061] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4332f67b-ec5f-418a-8ec2-993429c15a15 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.057151] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0377fe-dde8-4b51-8009-79811b3455a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.088602] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f8308a-f41c-495d-b3b0-3268afa07949 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.097135] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09064407-5edc-4cc0-83f3-4dd6192deaf0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.111128] env[63241]: DEBUG nova.compute.provider_tree [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1840.186209] env[63241]: DEBUG nova.network.neutron [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Successfully updated port: 958d4582-a277-49ee-b43b-f1f0b83217d0 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1840.190688] env[63241]: DEBUG nova.compute.manager [req-a3eee7fb-b037-43ac-9f52-b37128bacfca req-9c25cd8a-09b2-4177-9ace-2ccd7d77c4fc service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Received event network-vif-plugged-958d4582-a277-49ee-b43b-f1f0b83217d0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1840.190688] env[63241]: DEBUG 
oslo_concurrency.lockutils [req-a3eee7fb-b037-43ac-9f52-b37128bacfca req-9c25cd8a-09b2-4177-9ace-2ccd7d77c4fc service nova] Acquiring lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.190688] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3eee7fb-b037-43ac-9f52-b37128bacfca req-9c25cd8a-09b2-4177-9ace-2ccd7d77c4fc service nova] Lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.190688] env[63241]: DEBUG oslo_concurrency.lockutils [req-a3eee7fb-b037-43ac-9f52-b37128bacfca req-9c25cd8a-09b2-4177-9ace-2ccd7d77c4fc service nova] Lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.190886] env[63241]: DEBUG nova.compute.manager [req-a3eee7fb-b037-43ac-9f52-b37128bacfca req-9c25cd8a-09b2-4177-9ace-2ccd7d77c4fc service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] No waiting events found dispatching network-vif-plugged-958d4582-a277-49ee-b43b-f1f0b83217d0 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1840.191052] env[63241]: WARNING nova.compute.manager [req-a3eee7fb-b037-43ac-9f52-b37128bacfca req-9c25cd8a-09b2-4177-9ace-2ccd7d77c4fc service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Received unexpected event network-vif-plugged-958d4582-a277-49ee-b43b-f1f0b83217d0 for instance with vm_state building and task_state spawning. [ 1840.227859] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821082, 'name': Rename_Task, 'duration_secs': 0.202671} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.228155] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1840.228407] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4043d864-c134-460d-bc79-8ea38894806d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.236345] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1840.236345] env[63241]: value = "task-1821083" [ 1840.236345] env[63241]: _type = "Task" [ 1840.236345] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.248714] env[63241]: DEBUG oslo_concurrency.lockutils [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-6b96988b-cc79-41d7-a17d-277ae5aeb4dc-55bc89c7-241d-48af-9915-9dd2f1afd2c0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.248942] env[63241]: DEBUG oslo_concurrency.lockutils [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-6b96988b-cc79-41d7-a17d-277ae5aeb4dc-55bc89c7-241d-48af-9915-9dd2f1afd2c0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.250732] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821083, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.350091] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "29b6caa8-a07c-494b-b776-b08affa45c87" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.350383] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.350601] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "29b6caa8-a07c-494b-b776-b08affa45c87-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.350789] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.350958] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.353326] env[63241]: INFO nova.compute.manager [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Terminating instance [ 1840.355139] env[63241]: DEBUG nova.compute.manager [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1840.355338] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1840.356195] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85586b72-b377-4568-9b1f-99a1317198c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.364625] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1840.364876] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce7b4b1d-b0c0-4e22-8e20-ba3596eef771 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.373522] env[63241]: DEBUG oslo_vmware.api [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1840.373522] env[63241]: value = "task-1821084" [ 1840.373522] env[63241]: _type = "Task" [ 1840.373522] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.392291] env[63241]: DEBUG oslo_vmware.api [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821081, 'name': PowerOnVM_Task, 'duration_secs': 0.98382} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.392606] env[63241]: DEBUG oslo_vmware.api [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821084, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.392909] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1840.393176] env[63241]: INFO nova.compute.manager [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Took 9.26 seconds to spawn the instance on the hypervisor. [ 1840.393386] env[63241]: DEBUG nova.compute.manager [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1840.394328] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2959d5dc-647e-4e7b-add6-9eb53fc185e5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.429040] env[63241]: INFO nova.compute.manager [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Took 31.04 seconds to build instance. [ 1840.458058] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6d8494b-b10a-4401-982e-54a057be0105 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "6f3cbd23-30b9-4502-be07-2edd0a701291" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.983s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.633237] env[63241]: ERROR nova.scheduler.client.report [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [req-f8341085-0e44-408f-849b-1b69e1a6e625] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f8341085-0e44-408f-849b-1b69e1a6e625"}]} [ 1840.649160] env[63241]: DEBUG nova.scheduler.client.report [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1840.664277] env[63241]: DEBUG nova.scheduler.client.report [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1840.664716] env[63241]: DEBUG nova.compute.provider_tree [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1840.677309] env[63241]: DEBUG nova.scheduler.client.report [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1840.688949] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "refresh_cache-35f0c615-3e10-4bdf-aa8d-181f72c1c699" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.689242] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired lock "refresh_cache-35f0c615-3e10-4bdf-aa8d-181f72c1c699" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.689459] env[63241]: DEBUG nova.network.neutron [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 
tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1840.705337] env[63241]: DEBUG nova.scheduler.client.report [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1840.755113] env[63241]: DEBUG oslo_concurrency.lockutils [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.755113] env[63241]: DEBUG oslo_concurrency.lockutils [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.755113] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821083, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.757923] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e2ea0f-d9fb-40fd-851a-93215b6fc910 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.780235] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c541517-609d-473b-9e55-2fffaeb87611 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.811767] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Reconfiguring VM to detach interface {{(pid=63241) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1840.816986] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2dae2fbf-8d62-4455-889b-e2731402a3c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.846389] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1840.846389] env[63241]: value = "task-1821085" [ 1840.846389] env[63241]: _type = "Task" [ 1840.846389] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.865405] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.888111] env[63241]: DEBUG oslo_vmware.api [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821084, 'name': PowerOffVM_Task, 'duration_secs': 0.284361} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.892081] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1840.892291] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1840.893336] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbd79f3f-3049-497e-9a28-496d00386866 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.911728] env[63241]: INFO nova.compute.manager [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Took 14.35 seconds to build instance. 
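The placement exchange above (inventory PUT rejected with 409 "placement.concurrent_update", followed by "Refreshing inventories" and, further below, a successful update that moves the provider generation from 154 to 155) is an optimistic-concurrency retry. A minimal sketch of that dance, assuming a hypothetical placement client wrapper (get_provider_generation/put_inventory are illustrative names, not the real SchedulerReportClient API):

def set_inventory_with_retry(client, provider_uuid, inventory, max_retries=3):
    """Send the cached provider generation with the update; on a 409
    generation conflict, re-read the provider and retry with the new
    generation. `client` is a hypothetical wrapper used for illustration."""
    for _ in range(max_retries):
        generation = client.get_provider_generation(provider_uuid)   # refresh cached view
        resp = client.put_inventory(provider_uuid, inventory, generation)
        if resp.status_code == 200:
            return True                                              # generation was current
        if resp.status_code == 409 and 'placement.concurrent_update' in resp.text:
            continue                                                 # someone else bumped it; retry
        resp.raise_for_status()                                      # any other error is fatal
    return False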
[ 1840.932901] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f1dfa0bf-894a-4597-a0e5-7205f049d55f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.605s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.076544] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9003cd40-1f79-4bba-b4d6-b38d26949994 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.086858] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a7c3b2-a949-4774-9caa-27e8abb04062 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.092993] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1841.093395] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1841.093680] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Deleting the datastore file [datastore1] 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1841.094380] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46fbf8be-8b51-4667-a4a3-3a244e46d1d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.128342] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1009b5-c452-4b3f-bdc5-13e95705bf37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.131725] env[63241]: DEBUG oslo_vmware.api [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1841.131725] env[63241]: value = "task-1821087" [ 1841.131725] env[63241]: _type = "Task" [ 1841.131725] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.140216] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9fe3fc-2e38-40ac-a834-b7b13aa2f25c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.148064] env[63241]: DEBUG oslo_vmware.api [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.160586] env[63241]: DEBUG nova.compute.provider_tree [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1841.223459] env[63241]: DEBUG nova.network.neutron [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1841.249149] env[63241]: DEBUG oslo_vmware.api [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821083, 'name': PowerOnVM_Task, 'duration_secs': 0.725451} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.251657] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1841.339712] env[63241]: DEBUG oslo_concurrency.lockutils [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "94a604da-ad3d-415a-aa92-d648e3da803d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.340142] env[63241]: DEBUG oslo_concurrency.lockutils [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "94a604da-ad3d-415a-aa92-d648e3da803d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.340500] env[63241]: DEBUG oslo_concurrency.lockutils [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "94a604da-ad3d-415a-aa92-d648e3da803d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.341075] env[63241]: DEBUG oslo_concurrency.lockutils [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "94a604da-ad3d-415a-aa92-d648e3da803d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.341075] env[63241]: DEBUG oslo_concurrency.lockutils [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "94a604da-ad3d-415a-aa92-d648e3da803d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.343876] env[63241]: INFO nova.compute.manager [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Terminating instance [ 1841.346559] env[63241]: DEBUG nova.compute.manager [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1841.346664] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1841.348061] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e6a7cd-fa86-4fee-9aaf-ea950438c175 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.365596] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.369020] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1841.369020] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1b01237-06e1-46aa-a54d-c82e6174bfae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.378046] env[63241]: DEBUG oslo_vmware.api [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1841.378046] env[63241]: value = "task-1821088" [ 1841.378046] env[63241]: _type = "Task" [ 1841.378046] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.388584] env[63241]: DEBUG oslo_vmware.api [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821088, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.390907] env[63241]: DEBUG nova.compute.manager [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1841.391965] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae14804-40c9-4492-8808-5607c27aed29 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.414519] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d428e9ca-7f2b-4727-8b74-2545cd428028 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "426b001f-949f-4814-9c10-c7f44b6da44a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.863s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.477456] env[63241]: DEBUG nova.network.neutron [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Updating instance_info_cache with network_info: [{"id": "958d4582-a277-49ee-b43b-f1f0b83217d0", "address": "fa:16:3e:c0:86:c9", "network": {"id": "1e0ff0ab-bb23-4187-abf3-c1d13c2971ac", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-634449129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e139fb67702e42d8a8b2401cc6be9303", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958d4582-a2", "ovs_interfaceid": "958d4582-a277-49ee-b43b-f1f0b83217d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.645557] env[63241]: DEBUG oslo_vmware.api [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.323876} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.645944] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1841.646216] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1841.646431] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1841.646612] env[63241]: INFO nova.compute.manager [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1841.646872] env[63241]: DEBUG oslo.service.loopingcall [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1841.647080] env[63241]: DEBUG nova.compute.manager [-] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1841.647174] env[63241]: DEBUG nova.network.neutron [-] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1841.705967] env[63241]: DEBUG nova.scheduler.client.report [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 154 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1841.706291] env[63241]: DEBUG nova.compute.provider_tree [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 154 to 155 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1841.706473] env[63241]: DEBUG 
nova.compute.provider_tree [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1841.811899] env[63241]: DEBUG nova.compute.manager [req-7eb4fd6c-916f-4d29-8d83-18ee40f08bd5 req-e0a685a1-4dd5-4de0-881d-d240af20942f service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Received event network-changed-240bd985-f430-47f9-83a8-287f0c345a36 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1841.812179] env[63241]: DEBUG nova.compute.manager [req-7eb4fd6c-916f-4d29-8d83-18ee40f08bd5 req-e0a685a1-4dd5-4de0-881d-d240af20942f service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Refreshing instance network info cache due to event network-changed-240bd985-f430-47f9-83a8-287f0c345a36. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1841.812337] env[63241]: DEBUG oslo_concurrency.lockutils [req-7eb4fd6c-916f-4d29-8d83-18ee40f08bd5 req-e0a685a1-4dd5-4de0-881d-d240af20942f service nova] Acquiring lock "refresh_cache-6f3cbd23-30b9-4502-be07-2edd0a701291" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.812486] env[63241]: DEBUG oslo_concurrency.lockutils [req-7eb4fd6c-916f-4d29-8d83-18ee40f08bd5 req-e0a685a1-4dd5-4de0-881d-d240af20942f service nova] Acquired lock "refresh_cache-6f3cbd23-30b9-4502-be07-2edd0a701291" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.812648] env[63241]: DEBUG nova.network.neutron [req-7eb4fd6c-916f-4d29-8d83-18ee40f08bd5 req-e0a685a1-4dd5-4de0-881d-d240af20942f service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Refreshing network info cache for port 240bd985-f430-47f9-83a8-287f0c345a36 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1841.857944] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.891610] env[63241]: DEBUG oslo_vmware.api [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821088, 'name': PowerOffVM_Task, 'duration_secs': 0.324159} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.891900] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1841.892125] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1841.892362] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ba2a9aa-3807-4445-8d66-fd2ca73ba439 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.915387] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3616a4ef-13f4-455e-b6e1-e5b75e9b7f05 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 43.917s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.980767] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Releasing lock "refresh_cache-35f0c615-3e10-4bdf-aa8d-181f72c1c699" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.981364] env[63241]: DEBUG nova.compute.manager [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Instance network_info: |[{"id": "958d4582-a277-49ee-b43b-f1f0b83217d0", "address": "fa:16:3e:c0:86:c9", "network": {"id": "1e0ff0ab-bb23-4187-abf3-c1d13c2971ac", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-634449129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e139fb67702e42d8a8b2401cc6be9303", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958d4582-a2", "ovs_interfaceid": "958d4582-a277-49ee-b43b-f1f0b83217d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1841.981714] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None 
req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:86:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '958d4582-a277-49ee-b43b-f1f0b83217d0', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1841.994025] env[63241]: DEBUG oslo.service.loopingcall [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1841.994025] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1841.994025] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34211550-eb11-46ce-9d0c-27e1bf6adf41 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.019350] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1842.019632] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1842.020696] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleting the datastore file [datastore1] 94a604da-ad3d-415a-aa92-d648e3da803d {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1842.021197] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3446a77a-4772-4a01-9983-2c1d79fdd7e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.024973] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1842.024973] env[63241]: value = "task-1821090" [ 1842.024973] env[63241]: _type = "Task" [ 1842.024973] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.039079] env[63241]: DEBUG oslo_vmware.api [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for the task: (returnval){ [ 1842.039079] env[63241]: value = "task-1821091" [ 1842.039079] env[63241]: _type = "Task" [ 1842.039079] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.048818] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821090, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.059193] env[63241]: DEBUG oslo_vmware.api [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.365349] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.409432] env[63241]: DEBUG nova.compute.manager [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Received event network-changed-958d4582-a277-49ee-b43b-f1f0b83217d0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1842.409432] env[63241]: DEBUG nova.compute.manager [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Refreshing instance network info cache due to event network-changed-958d4582-a277-49ee-b43b-f1f0b83217d0. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1842.409965] env[63241]: DEBUG oslo_concurrency.lockutils [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] Acquiring lock "refresh_cache-35f0c615-3e10-4bdf-aa8d-181f72c1c699" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.409965] env[63241]: DEBUG oslo_concurrency.lockutils [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] Acquired lock "refresh_cache-35f0c615-3e10-4bdf-aa8d-181f72c1c699" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.409965] env[63241]: DEBUG nova.network.neutron [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Refreshing network info cache for port 958d4582-a277-49ee-b43b-f1f0b83217d0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1842.541400] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.541925] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.542114] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.542442] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.542561] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.544393] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821090, 'name': CreateVM_Task, 'duration_secs': 0.421314} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.544697] env[63241]: INFO nova.compute.manager [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Terminating instance [ 1842.552533] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1842.553492] env[63241]: DEBUG nova.compute.manager [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1842.553693] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1842.554569] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.554777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.555158] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1842.556185] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401d104d-15aa-4773-8d7a-632c1380fc7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.559827] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98287342-b05b-4843-a08f-d49d34cba543 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.566533] env[63241]: DEBUG oslo_vmware.api [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Task: {'id': task-1821091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23074} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.567520] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1842.567520] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1842.568168] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1842.568168] env[63241]: INFO nova.compute.manager [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1842.568329] env[63241]: DEBUG oslo.service.loopingcall [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1842.568849] env[63241]: DEBUG nova.compute.manager [-] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1842.569094] env[63241]: DEBUG nova.network.neutron [-] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1842.574614] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1842.574614] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b33475-66c9-276e-1b22-7d44400d9f74" [ 1842.574614] env[63241]: _type = "Task" [ 1842.574614] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.574953] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1842.575587] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e52d0d0-c862-41be-b878-d536d8fb8aea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.586158] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b33475-66c9-276e-1b22-7d44400d9f74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.588649] env[63241]: DEBUG oslo_vmware.api [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1842.588649] env[63241]: value = "task-1821092" [ 1842.588649] env[63241]: _type = "Task" [ 1842.588649] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.598160] env[63241]: DEBUG oslo_vmware.api [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821092, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.706490] env[63241]: DEBUG nova.network.neutron [-] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.711040] env[63241]: DEBUG nova.network.neutron [req-7eb4fd6c-916f-4d29-8d83-18ee40f08bd5 req-e0a685a1-4dd5-4de0-881d-d240af20942f service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Updated VIF entry in instance network info cache for port 240bd985-f430-47f9-83a8-287f0c345a36. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1842.711434] env[63241]: DEBUG nova.network.neutron [req-7eb4fd6c-916f-4d29-8d83-18ee40f08bd5 req-e0a685a1-4dd5-4de0-881d-d240af20942f service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Updating instance_info_cache with network_info: [{"id": "240bd985-f430-47f9-83a8-287f0c345a36", "address": "fa:16:3e:0c:cc:32", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap240bd985-f4", "ovs_interfaceid": "240bd985-f430-47f9-83a8-287f0c345a36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.720305] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.583s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.860699] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.086910] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b33475-66c9-276e-1b22-7d44400d9f74, 'name': SearchDatastore_Task, 'duration_secs': 0.023441} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.087173] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.087417] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1843.087652] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.087802] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.088008] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1843.088325] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36f7ebaf-14c6-4070-a914-d9c2cfe39b6d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.102691] env[63241]: DEBUG oslo_vmware.api [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821092, 'name': PowerOffVM_Task, 'duration_secs': 0.515046} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.105304] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1843.105481] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1843.106635] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21fe885a-1e04-4664-ac35-0a7f1f00c214 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.108497] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1843.108724] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1843.109616] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab15cc6a-0f35-44e8-8a70-1abbeb17e828 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.116495] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1843.116495] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52740351-e814-3605-18b4-59a2c9742754" [ 1843.116495] env[63241]: _type = "Task" [ 1843.116495] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.125328] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52740351-e814-3605-18b4-59a2c9742754, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.143774] env[63241]: DEBUG nova.network.neutron [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Updated VIF entry in instance network info cache for port 958d4582-a277-49ee-b43b-f1f0b83217d0. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1843.144207] env[63241]: DEBUG nova.network.neutron [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Updating instance_info_cache with network_info: [{"id": "958d4582-a277-49ee-b43b-f1f0b83217d0", "address": "fa:16:3e:c0:86:c9", "network": {"id": "1e0ff0ab-bb23-4187-abf3-c1d13c2971ac", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-634449129-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e139fb67702e42d8a8b2401cc6be9303", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958d4582-a2", "ovs_interfaceid": "958d4582-a277-49ee-b43b-f1f0b83217d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.211370] env[63241]: INFO nova.compute.manager [-] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Took 1.56 seconds to deallocate network for instance. 
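[editor's note] The entries above trace the driver-level teardown of instance 29b6caa8-a07c-494b-b776-b08affa45c87 (and, interleaved, 94a604da and 0e4a3b3a): power off the VM, unregister it, delete its datastore files, then deallocate the Neutron ports. Each vCenter call returns a task reference, and the alternating "Waiting for the task", "progress is N%", and "completed successfully" lines are the poll loop around that reference. Below is a minimal, self-contained sketch of such a poll loop; get_task_info and the toy task dict are invented for illustration only and are not the oslo.vmware API.

    import time

    def get_task_info(task_ref):
        """Hypothetical accessor standing in for the PropertyCollector read the
        driver issues on every poll; returns (state, progress)."""
        task_ref['progress'] = min(task_ref['progress'] + 33, 100)
        state = 'success' if task_ref['progress'] >= 100 else 'running'
        return state, task_ref['progress']

    def wait_for_task(task_ref, poll_interval=0.5, timeout=300.0):
        """Poll a task until it finishes, mirroring the repeated
        'Task: {...} progress is N%' lines followed by 'completed successfully'."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = get_task_info(task_ref)
            print("Task %s progress is %d%%." % (task_ref['id'], progress))
            if state == 'success':
                print("Task %s completed successfully." % task_ref['id'])
                return
            if state == 'error':
                raise RuntimeError("task %s failed" % task_ref['id'])
            time.sleep(poll_interval)
        raise TimeoutError("task %s did not finish within %ss" % (task_ref['id'], timeout))

    # Toy task dict used only to make the sketch executable; a real reference
    # would be the vCenter task returned by PowerOffVM_Task or DeleteDatastoreFile_Task.
    wait_for_task({'id': 'task-1821088', 'progress': 0}, poll_interval=0.01)
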
[ 1843.213695] env[63241]: DEBUG oslo_concurrency.lockutils [req-7eb4fd6c-916f-4d29-8d83-18ee40f08bd5 req-e0a685a1-4dd5-4de0-881d-d240af20942f service nova] Releasing lock "refresh_cache-6f3cbd23-30b9-4502-be07-2edd0a701291" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.218843] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1843.219105] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1843.219301] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleting the datastore file [datastore1] 0e4a3b3a-4464-404f-9154-1ab6f97ae951 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1843.219855] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd77ba1d-374e-4055-b8a0-530a074d960f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.228653] env[63241]: DEBUG oslo_vmware.api [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for the task: (returnval){ [ 1843.228653] env[63241]: value = "task-1821094" [ 1843.228653] env[63241]: _type = "Task" [ 1843.228653] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.240447] env[63241]: DEBUG oslo_vmware.api [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.286669] env[63241]: INFO nova.scheduler.client.report [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted allocation for migration 9b0fd3a6-0f57-4279-b77c-70066c90abbc [ 1843.362555] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.409933] env[63241]: DEBUG nova.network.neutron [-] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.627929] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52740351-e814-3605-18b4-59a2c9742754, 'name': SearchDatastore_Task, 'duration_secs': 0.018194} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.628851] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38cd8851-2e55-435d-bb8b-e96bff8bc134 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.635198] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1843.635198] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52263fd6-15d1-8c23-9ac2-cb8f96ae3523" [ 1843.635198] env[63241]: _type = "Task" [ 1843.635198] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.644495] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52263fd6-15d1-8c23-9ac2-cb8f96ae3523, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.647196] env[63241]: DEBUG oslo_concurrency.lockutils [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] Releasing lock "refresh_cache-35f0c615-3e10-4bdf-aa8d-181f72c1c699" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.647530] env[63241]: DEBUG nova.compute.manager [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Received event network-changed-b434fe5e-e77d-4974-8bd4-7226a359e28d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1843.647722] env[63241]: DEBUG nova.compute.manager [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Refreshing instance network info cache due to event network-changed-b434fe5e-e77d-4974-8bd4-7226a359e28d. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1843.647934] env[63241]: DEBUG oslo_concurrency.lockutils [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] Acquiring lock "refresh_cache-779d2380-be6c-4fdb-8755-10e99f8a6fd9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.648090] env[63241]: DEBUG oslo_concurrency.lockutils [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] Acquired lock "refresh_cache-779d2380-be6c-4fdb-8755-10e99f8a6fd9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.648258] env[63241]: DEBUG nova.network.neutron [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Refreshing network info cache for port b434fe5e-e77d-4974-8bd4-7226a359e28d {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1843.721058] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.721357] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.721588] env[63241]: DEBUG nova.objects.instance [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'resources' on Instance uuid 29b6caa8-a07c-494b-b776-b08affa45c87 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1843.741297] env[63241]: DEBUG oslo_vmware.api [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Task: {'id': task-1821094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206821} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.741538] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1843.741728] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1843.741902] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1843.742088] env[63241]: INFO nova.compute.manager [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1843.742326] env[63241]: DEBUG oslo.service.loopingcall [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1843.742513] env[63241]: DEBUG nova.compute.manager [-] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1843.742628] env[63241]: DEBUG nova.network.neutron [-] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1843.793283] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.990s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.863806] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.913494] env[63241]: INFO nova.compute.manager [-] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Took 1.34 seconds to deallocate network for instance. 
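[editor's note] The terminate sequences around this point (instance 0e4a3b3a at 1842.541400 and instance 426b001f starting at 1843.935741) are serialized with oslo.concurrency locks named after the instance UUID, plus a short-lived "<uuid>-events" lock while queued external events are cleared; that is what produces the acquired/waited/held timings in the surrounding lines. The sketch below is an invented toy, not Nova's actual ComputeManager, and only illustrates that locking pattern.

    from oslo_concurrency import lockutils

    class ToyComputeManager:
        """Invented stand-in (not Nova code) showing the per-instance locking
        pattern behind the 'acquired by ... do_terminate_instance' lines."""

        def terminate_instance(self, instance_uuid):
            # One lock named after the instance UUID serializes terminate
            # against any concurrent build/unshelve on the same instance.
            @lockutils.synchronized(instance_uuid)
            def do_terminate_instance():
                self._clear_events(instance_uuid)
                self._shutdown_instance(instance_uuid)
            do_terminate_instance()

        def _clear_events(self, instance_uuid):
            # Second, short-lived "<uuid>-events" lock while pending external
            # events are dropped; it shows up as 'held 0.000s' in the log.
            with lockutils.lock(instance_uuid + "-events"):
                pass

        def _shutdown_instance(self, instance_uuid):
            print("Start destroying the instance %s on the hypervisor." % instance_uuid)

    ToyComputeManager().terminate_instance("0e4a3b3a-4464-404f-9154-1ab6f97ae951")
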
[ 1843.935741] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "426b001f-949f-4814-9c10-c7f44b6da44a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.936205] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "426b001f-949f-4814-9c10-c7f44b6da44a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.936534] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "426b001f-949f-4814-9c10-c7f44b6da44a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.937082] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "426b001f-949f-4814-9c10-c7f44b6da44a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.937161] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "426b001f-949f-4814-9c10-c7f44b6da44a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.939361] env[63241]: INFO nova.compute.manager [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Terminating instance [ 1843.941246] env[63241]: DEBUG nova.compute.manager [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1843.941850] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1843.942254] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75c0dff-818f-4a52-be64-e859740600be {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.951992] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1843.952874] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e79a82b-e276-4840-96d6-0b954e069de3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.965496] env[63241]: DEBUG oslo_vmware.api [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1843.965496] env[63241]: value = "task-1821095" [ 1843.965496] env[63241]: _type = "Task" [ 1843.965496] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.977361] env[63241]: DEBUG oslo_vmware.api [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.150173] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52263fd6-15d1-8c23-9ac2-cb8f96ae3523, 'name': SearchDatastore_Task, 'duration_secs': 0.015825} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.150173] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.150173] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 35f0c615-3e10-4bdf-aa8d-181f72c1c699/35f0c615-3e10-4bdf-aa8d-181f72c1c699.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1844.152959] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7172892-55e6-4083-8352-d118e6dcc54d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.161504] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1844.161504] env[63241]: value = "task-1821096" [ 1844.161504] env[63241]: _type = "Task" [ 1844.161504] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.171680] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821096, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.372191] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.420060] env[63241]: DEBUG oslo_concurrency.lockutils [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.449306] env[63241]: DEBUG nova.network.neutron [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Updated VIF entry in instance network info cache for port b434fe5e-e77d-4974-8bd4-7226a359e28d. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1844.449707] env[63241]: DEBUG nova.network.neutron [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Updating instance_info_cache with network_info: [{"id": "b434fe5e-e77d-4974-8bd4-7226a359e28d", "address": "fa:16:3e:27:f8:85", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb434fe5e-e7", "ovs_interfaceid": "b434fe5e-e77d-4974-8bd4-7226a359e28d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.453365] env[63241]: DEBUG nova.compute.manager [req-b5782e40-3771-4620-acd2-592e2d126e3e req-9091d08b-7e50-4cb4-8ef7-b9904fc5102a service nova] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Received event network-vif-deleted-7b46f450-f9df-492c-bc52-8760f14afb90 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1844.453613] env[63241]: DEBUG nova.compute.manager [req-b5782e40-3771-4620-acd2-592e2d126e3e req-9091d08b-7e50-4cb4-8ef7-b9904fc5102a service nova] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Received event network-vif-deleted-c6bc0033-163b-4b6a-8577-877f59a975dc {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1844.453749] env[63241]: DEBUG nova.compute.manager [req-b5782e40-3771-4620-acd2-592e2d126e3e req-9091d08b-7e50-4cb4-8ef7-b9904fc5102a service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Received event network-vif-deleted-24131a23-55e1-4bd6-8813-5768da05438f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1844.453961] env[63241]: INFO nova.compute.manager [req-b5782e40-3771-4620-acd2-592e2d126e3e req-9091d08b-7e50-4cb4-8ef7-b9904fc5102a service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Neutron deleted interface 24131a23-55e1-4bd6-8813-5768da05438f; detaching it from the instance and deleting it from the info cache [ 1844.454108] env[63241]: DEBUG nova.network.neutron [req-b5782e40-3771-4620-acd2-592e2d126e3e req-9091d08b-7e50-4cb4-8ef7-b9904fc5102a service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.477672] env[63241]: DEBUG oslo_vmware.api [None req-ea6337db-81ac-4620-acf8-5015927d8825 
tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821095, 'name': PowerOffVM_Task, 'duration_secs': 0.4327} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.480556] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1844.480860] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1844.481494] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fffe43b4-5369-4aa9-a9b7-de82245612d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.491041] env[63241]: DEBUG nova.network.neutron [-] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.531122] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894db9e3-384e-4a21-80e0-e789a7f22205 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.539115] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dde5947-64ac-4753-a171-292ca3615470 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.571524] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf01bfc4-a52f-4e0b-9b4a-e7bbd022fe56 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.579417] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d01413-29e5-400b-a444-2c4660662000 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.592978] env[63241]: DEBUG nova.compute.provider_tree [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1844.672644] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821096, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.711446] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1844.711716] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1844.711908] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Deleting the datastore file [datastore1] 426b001f-949f-4814-9c10-c7f44b6da44a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1844.712310] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-796e7884-96ab-4e6f-a8b1-7d48c763e4e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.719128] env[63241]: DEBUG oslo_vmware.api [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1844.719128] env[63241]: value = "task-1821098" [ 1844.719128] env[63241]: _type = "Task" [ 1844.719128] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.731295] env[63241]: DEBUG oslo_vmware.api [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821098, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.832515] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "49d350ff-4932-4759-a3fa-53274c484ae6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.833017] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.833371] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "49d350ff-4932-4759-a3fa-53274c484ae6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.833683] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.834091] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.837039] env[63241]: INFO nova.compute.manager [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Terminating instance [ 1844.839755] env[63241]: DEBUG nova.compute.manager [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1844.840158] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1844.841404] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c426f15b-5787-42dd-9d48-8c6cefcedd97 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.851331] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1844.851625] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a1bb337-c9bf-4b5e-a7fa-e400c257d1d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.863263] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.865039] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1844.865039] env[63241]: value = "task-1821099" [ 1844.865039] env[63241]: _type = "Task" [ 1844.865039] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.877322] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821099, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.956579] env[63241]: DEBUG oslo_concurrency.lockutils [req-d514502c-226b-4950-a8b4-c5a8bdb5d29d req-3556a6a1-ed77-4469-88d1-06d3229e4723 service nova] Releasing lock "refresh_cache-779d2380-be6c-4fdb-8755-10e99f8a6fd9" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.958660] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67354983-c1e9-4a71-961c-3bb472fac300 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.970840] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20749ecd-87ba-4afa-8da1-6603ad26219c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.996133] env[63241]: INFO nova.compute.manager [-] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Took 1.25 seconds to deallocate network for instance. 
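
The records above follow the task-handling pattern visible throughout this log: each vCenter call ending in _Task (PowerOffVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) returns a task reference, and wait_for_task in oslo_vmware/api.py polls it, logging "progress is N%" until the task "completed successfully" with a duration_secs value. The snippet below is only a minimal, self-contained sketch of that poll-until-terminal loop; FakeTaskInfo and fetch_task_info are hypothetical stand-ins for the vCenter TaskInfo object and the property lookup, not the real oslo.vmware API.

import time
from dataclasses import dataclass


@dataclass
class FakeTaskInfo:
    """Hypothetical stand-in for the vCenter TaskInfo object."""
    state: str      # 'running', 'success' or 'error'
    progress: int   # percent complete


def fetch_task_info(states):
    """Stand-in for the property lookup that reads a task's current info."""
    return next(states)


def wait_for_task(task_id, states, poll_interval=0.1):
    """Poll a task until it reaches a terminal state, like the loop logged above."""
    start = time.monotonic()
    while True:
        info = fetch_task_info(states)
        if info.state == "success":
            print(f"Task {task_id} completed successfully "
                  f"(duration_secs={time.monotonic() - start:.3f})")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # A PowerOffVM_Task-like lifecycle: two running polls, then success.
    lifecycle = iter([
        FakeTaskInfo("running", 0),
        FakeTaskInfo("running", 50),
        FakeTaskInfo("success", 100),
    ])
    wait_for_task("task-1821095", lifecycle)
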
[ 1845.016102] env[63241]: DEBUG nova.compute.manager [req-b5782e40-3771-4620-acd2-592e2d126e3e req-9091d08b-7e50-4cb4-8ef7-b9904fc5102a service nova] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Detach interface failed, port_id=24131a23-55e1-4bd6-8813-5768da05438f, reason: Instance 0e4a3b3a-4464-404f-9154-1ab6f97ae951 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1845.095878] env[63241]: DEBUG nova.scheduler.client.report [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1845.172330] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821096, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.917672} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.172603] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 35f0c615-3e10-4bdf-aa8d-181f72c1c699/35f0c615-3e10-4bdf-aa8d-181f72c1c699.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1845.172831] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1845.173111] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70eb02ec-93ff-42f2-9ceb-5ebd70b9c658 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.182229] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1845.182229] env[63241]: value = "task-1821100" [ 1845.182229] env[63241]: _type = "Task" [ 1845.182229] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.194313] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821100, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.228986] env[63241]: DEBUG oslo_vmware.api [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821098, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.363259] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.373424] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821099, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.522613] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.601915] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.879s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.603413] env[63241]: DEBUG oslo_concurrency.lockutils [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.183s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.603568] env[63241]: DEBUG nova.objects.instance [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lazy-loading 'resources' on Instance uuid 94a604da-ad3d-415a-aa92-d648e3da803d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1845.624995] env[63241]: INFO nova.scheduler.client.report [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Deleted allocations for instance 29b6caa8-a07c-494b-b776-b08affa45c87 [ 1845.692801] env[63241]: DEBUG oslo_vmware.api [None 
req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821100, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.182974} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.693100] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1845.693888] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26b7678-b8cc-49ab-b88a-9670e7c2e49d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.715616] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 35f0c615-3e10-4bdf-aa8d-181f72c1c699/35f0c615-3e10-4bdf-aa8d-181f72c1c699.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1845.716067] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5512712d-813d-4d1a-bc32-d1a277080eb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.737218] env[63241]: DEBUG oslo_vmware.api [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.858867} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.738378] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1845.738575] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1845.738760] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1845.738934] env[63241]: INFO nova.compute.manager [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Took 1.80 seconds to destroy the instance on the hypervisor. [ 1845.739182] env[63241]: DEBUG oslo.service.loopingcall [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1845.739426] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1845.739426] env[63241]: value = "task-1821101" [ 1845.739426] env[63241]: _type = "Task" [ 1845.739426] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.739610] env[63241]: DEBUG nova.compute.manager [-] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1845.739740] env[63241]: DEBUG nova.network.neutron [-] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1845.749318] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821101, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.866724] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.875595] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821099, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.135759] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9f6c9303-7316-40ad-9d69-db32612c1f06 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "29b6caa8-a07c-494b-b776-b08affa45c87" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.785s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.251567] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821101, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.368493] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.372975] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4a700d-f274-4ddc-876c-093f5eb69eb2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.380987] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821099, 'name': PowerOffVM_Task, 'duration_secs': 1.234763} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.382891] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1846.383110] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1846.383402] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-307da90c-c797-46e3-bac6-e54ea4ff2787 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.385574] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06aa31da-2e80-4128-b79b-51cdd60fb194 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.416731] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cbd177-e2d8-415e-b7c0-f14d9cfecfb3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.425021] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269a00be-0128-4a0b-9df9-eae44bca31cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.438916] env[63241]: DEBUG nova.compute.provider_tree [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1846.485155] env[63241]: DEBUG nova.compute.manager [req-c65382e2-da4a-4d62-a45d-22105a026bc7 req-dc67b663-cb88-4a35-aea7-9dc03501eda9 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Received event network-vif-deleted-0b88ea9e-8926-4fb6-a2f7-3ba89336c41d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1846.485403] env[63241]: INFO nova.compute.manager [req-c65382e2-da4a-4d62-a45d-22105a026bc7 req-dc67b663-cb88-4a35-aea7-9dc03501eda9 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Neutron deleted interface 0b88ea9e-8926-4fb6-a2f7-3ba89336c41d; detaching it from the instance and deleting it from the info cache [ 1846.486028] env[63241]: DEBUG nova.network.neutron [req-c65382e2-da4a-4d62-a45d-22105a026bc7 req-dc67b663-cb88-4a35-aea7-9dc03501eda9 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Updating instance_info_cache with 
network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.710127] env[63241]: DEBUG nova.network.neutron [-] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.753273] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821101, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.868639] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.958933] env[63241]: ERROR nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [req-4b82fe8f-458b-4525-ab68-d6dc732d576a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4b82fe8f-458b-4525-ab68-d6dc732d576a"}]} [ 1846.976787] env[63241]: DEBUG nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1846.990765] env[63241]: DEBUG nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1846.991035] env[63241]: DEBUG nova.compute.provider_tree [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1846.992918] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c0ffdef-1e9c-48cc-b4f2-122d8446e976 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.003632] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c49dd13-12f7-4754-aec8-c853bddcde6f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.014731] env[63241]: DEBUG nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1847.042594] env[63241]: DEBUG nova.compute.manager [req-c65382e2-da4a-4d62-a45d-22105a026bc7 req-dc67b663-cb88-4a35-aea7-9dc03501eda9 service nova] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Detach interface failed, port_id=0b88ea9e-8926-4fb6-a2f7-3ba89336c41d, reason: Instance 426b001f-949f-4814-9c10-c7f44b6da44a could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1847.043784] env[63241]: DEBUG nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1847.211583] env[63241]: INFO nova.compute.manager [-] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Took 1.47 seconds to deallocate network for instance. [ 1847.252574] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821101, 'name': ReconfigVM_Task, 'duration_secs': 1.099714} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.253765] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 35f0c615-3e10-4bdf-aa8d-181f72c1c699/35f0c615-3e10-4bdf-aa8d-181f72c1c699.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1847.254881] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a65911f-051e-4099-b0ee-25f13cc60b25 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.257428] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-220ecdf8-cf8f-472d-bd05-482713b26591 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.267072] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c268df-e5f8-4b45-94b8-95f59886778c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.270717] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1847.270717] env[63241]: value = "task-1821103" [ 1847.270717] env[63241]: _type = "Task" [ 1847.270717] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.307827] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7693c543-be4c-4fb3-ac8e-156fa83d5b7c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.314659] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821103, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.321165] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be73a3fa-48e8-44d5-b729-9c095b4a68a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.337351] env[63241]: DEBUG nova.compute.provider_tree [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1847.369868] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.718263] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.770568] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.770823] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.783863] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821103, 'name': Rename_Task, 'duration_secs': 0.194667} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.784611] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1847.784993] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd558ab5-a6da-45de-b18f-04cfed53326a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.794790] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1847.794790] env[63241]: value = "task-1821104" [ 1847.794790] env[63241]: _type = "Task" [ 1847.794790] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.806178] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821104, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.858025] env[63241]: ERROR nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] [req-480aa0a3-f03a-48fd-872e-8bb6ae9384ae] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-480aa0a3-f03a-48fd-872e-8bb6ae9384ae"}]} [ 1847.870078] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.874611] env[63241]: DEBUG nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1847.887977] env[63241]: DEBUG nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1847.888274] env[63241]: DEBUG nova.compute.provider_tree [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1847.899540] env[63241]: DEBUG nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Refreshing aggregate associations for resource provider 
9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1847.917560] env[63241]: DEBUG nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1848.123408] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fd3b9a-06fe-40c8-8ec5-c0e2a6c3a30d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.132066] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea4c677-0678-46a3-bc0e-0a1e4e87dfb5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.164288] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d85fca-1bdd-4855-958d-ceda6c1367da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.172773] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fce48ac-499c-4c6d-908d-d531844b7ce0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.187170] env[63241]: DEBUG nova.compute.provider_tree [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1848.273976] env[63241]: DEBUG nova.compute.manager [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1848.305937] env[63241]: DEBUG oslo_vmware.api [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821104, 'name': PowerOnVM_Task, 'duration_secs': 0.478447} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.306263] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1848.306434] env[63241]: INFO nova.compute.manager [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Took 8.56 seconds to spawn the instance on the hypervisor. [ 1848.306622] env[63241]: DEBUG nova.compute.manager [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1848.307427] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0441b866-9037-4aa5-a816-9d1919694893 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.370738] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.717827] env[63241]: DEBUG nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 157 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1848.718176] env[63241]: DEBUG nova.compute.provider_tree [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 157 to 158 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1848.718412] env[63241]: DEBUG nova.compute.provider_tree [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1848.792380] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.827855] env[63241]: INFO nova.compute.manager [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Took 13.53 seconds to build instance. [ 1848.872021] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.104165] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.223881] env[63241]: DEBUG oslo_concurrency.lockutils [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.620s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.226820] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.704s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.226820] env[63241]: DEBUG nova.objects.instance [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lazy-loading 'resources' on Instance uuid 0e4a3b3a-4464-404f-9154-1ab6f97ae951 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.242515] env[63241]: INFO nova.scheduler.client.report [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Deleted allocations for instance 94a604da-ad3d-415a-aa92-d648e3da803d [ 1849.331027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9e2eaab1-39d1-482e-9b5c-0a95ec2ddf80 tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
15.041s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.331027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.226s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.331027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.331027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.331027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.333410] env[63241]: INFO nova.compute.manager [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Terminating instance [ 1849.335354] env[63241]: DEBUG nova.compute.manager [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1849.335583] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1849.336517] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b55de6-ccee-453d-b549-aa931a012f7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.346984] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1849.347257] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-252477b7-c390-4071-a917-0709665a49cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.355915] env[63241]: DEBUG oslo_vmware.api [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1849.355915] env[63241]: value = "task-1821105" [ 1849.355915] env[63241]: _type = "Task" [ 1849.355915] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.365327] env[63241]: DEBUG oslo_vmware.api [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821105, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.373205] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.750097] env[63241]: DEBUG oslo_concurrency.lockutils [None req-864b838d-2d20-40d3-bb6b-c1dacd059625 tempest-ServersTestJSON-1880776855 tempest-ServersTestJSON-1880776855-project-member] Lock "94a604da-ad3d-415a-aa92-d648e3da803d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.410s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.866527] env[63241]: DEBUG oslo_vmware.api [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821105, 'name': PowerOffVM_Task, 'duration_secs': 0.162998} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.871787] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1849.871968] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1849.872431] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0d29738-cc2e-4f32-95e5-66cece896eba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.880453] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.944812] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdc387d-9150-4c4e-846d-ed4aa83ff570 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.952955] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce10dd4-bb5e-47b5-bee0-e7debdd335b4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.986966] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50563dfa-a877-4c35-b996-072a2066a5dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.995550] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed82172a-1ae0-49a6-a073-1ff40a0f57e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.010234] env[63241]: DEBUG nova.compute.provider_tree [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1850.377136] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.513484] env[63241]: DEBUG nova.scheduler.client.report [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1850.877102] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.018576] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.792s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.022228] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.304s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.025013] env[63241]: DEBUG nova.objects.instance [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lazy-loading 'resources' on Instance uuid 426b001f-949f-4814-9c10-c7f44b6da44a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1851.046605] env[63241]: INFO nova.scheduler.client.report [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Deleted allocations for instance 0e4a3b3a-4464-404f-9154-1ab6f97ae951 [ 1851.379173] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.554117] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b09f9d09-99ec-4c6f-8ce9-4d2a732325b0 tempest-ServersNegativeTestJSON-1201620273 tempest-ServersNegativeTestJSON-1201620273-project-member] Lock "0e4a3b3a-4464-404f-9154-1ab6f97ae951" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.012s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.721119] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4151b717-e8f9-47d0-ae55-ae8e27f0e023 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.729723] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b0e578-0050-48ef-9660-ade3c7e9d235 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.761811] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3aa49ce-e38a-48b7-8733-f5dfba8d0b4e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.770772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b06e0ba-08e8-433d-b2a3-3166d9b15265 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.785596] env[63241]: DEBUG nova.compute.provider_tree [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1851.879349] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.947377] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1851.947377] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1851.947377] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Deleting the datastore file [datastore1] 35f0c615-3e10-4bdf-aa8d-181f72c1c699 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1851.948522] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69eba8c3-fb90-4062-9ab9-adf49c86571f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.950834] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1851.951160] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1851.951455] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleting the datastore file [datastore1] 49d350ff-4932-4759-a3fa-53274c484ae6 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1851.951794] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88c2b81f-45e8-4b92-abf2-73cc4cdabced {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.960194] env[63241]: DEBUG oslo_vmware.api [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for the task: (returnval){ [ 1851.960194] env[63241]: value = "task-1821107" [ 1851.960194] env[63241]: _type = "Task" [ 1851.960194] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.962188] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for the task: (returnval){ [ 1851.962188] env[63241]: value = "task-1821108" [ 1851.962188] env[63241]: _type = "Task" [ 1851.962188] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.975086] env[63241]: DEBUG oslo_vmware.api [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.978279] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821108, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.292023] env[63241]: DEBUG nova.scheduler.client.report [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1852.379457] env[63241]: DEBUG oslo_vmware.api [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821085, 'name': ReconfigVM_Task, 'duration_secs': 11.249864} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.380476] env[63241]: DEBUG oslo_concurrency.lockutils [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.380476] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Reconfigured VM to detach interface {{(pid=63241) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1852.474099] env[63241]: DEBUG oslo_vmware.api [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Task: {'id': task-1821107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173183} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.474729] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1852.474922] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1852.475119] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1852.475299] env[63241]: INFO nova.compute.manager [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Took 3.14 seconds to destroy the instance on the hypervisor. [ 1852.475533] env[63241]: DEBUG oslo.service.loopingcall [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1852.475723] env[63241]: DEBUG nova.compute.manager [-] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1852.475819] env[63241]: DEBUG nova.network.neutron [-] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1852.480652] env[63241]: DEBUG oslo_vmware.api [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Task: {'id': task-1821108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185583} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.482569] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1852.482764] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1852.482946] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1852.483147] env[63241]: INFO nova.compute.manager [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Took 7.64 seconds to destroy the instance on the hypervisor. [ 1852.483402] env[63241]: DEBUG oslo.service.loopingcall [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1852.483642] env[63241]: DEBUG nova.compute.manager [-] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1852.483752] env[63241]: DEBUG nova.network.neutron [-] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1852.798023] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.800357] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.008s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.802504] env[63241]: INFO nova.compute.claims [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1852.837041] env[63241]: INFO nova.scheduler.client.report [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Deleted allocations for instance 426b001f-949f-4814-9c10-c7f44b6da44a [ 1852.878348] env[63241]: DEBUG nova.compute.manager [req-bb443087-27a9-4083-984a-72fd5b735352 req-c507dfe7-8fcd-4399-b42e-6d1e793f13b3 service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Received event network-vif-deleted-958d4582-a277-49ee-b43b-f1f0b83217d0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1852.878592] env[63241]: INFO nova.compute.manager [req-bb443087-27a9-4083-984a-72fd5b735352 req-c507dfe7-8fcd-4399-b42e-6d1e793f13b3 service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Neutron deleted interface 958d4582-a277-49ee-b43b-f1f0b83217d0; detaching it from the instance and deleting it from the info cache [ 1852.878808] env[63241]: DEBUG nova.network.neutron [req-bb443087-27a9-4083-984a-72fd5b735352 req-c507dfe7-8fcd-4399-b42e-6d1e793f13b3 service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.907846] env[63241]: DEBUG nova.compute.manager [req-5c51590d-d0a7-4782-80ea-aee1f587f948 req-54d6cf9a-28b6-4170-bcac-40c0289a5918 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Received event network-vif-deleted-1c88bbab-4bd7-4ea5-858c-317020381bac {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1852.908117] env[63241]: INFO nova.compute.manager [req-5c51590d-d0a7-4782-80ea-aee1f587f948 req-54d6cf9a-28b6-4170-bcac-40c0289a5918 service nova] [instance: 
49d350ff-4932-4759-a3fa-53274c484ae6] Neutron deleted interface 1c88bbab-4bd7-4ea5-858c-317020381bac; detaching it from the instance and deleting it from the info cache [ 1852.908297] env[63241]: DEBUG nova.network.neutron [req-5c51590d-d0a7-4782-80ea-aee1f587f948 req-54d6cf9a-28b6-4170-bcac-40c0289a5918 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.347140] env[63241]: DEBUG nova.network.neutron [-] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.349217] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ea6337db-81ac-4620-acf8-5015927d8825 tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "426b001f-949f-4814-9c10-c7f44b6da44a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.413s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.383137] env[63241]: DEBUG nova.network.neutron [-] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.384512] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64f49299-2d40-4a05-babd-acc54aeb739d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.398417] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdaaa2bc-badd-485d-a73d-9a05293a856b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.410728] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-529c448c-20b2-4dd8-ac4c-c14a51f53d1a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.421906] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fab8e0-2798-426a-b25d-2d5cfc8ab4e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.444257] env[63241]: DEBUG nova.compute.manager [req-bb443087-27a9-4083-984a-72fd5b735352 req-c507dfe7-8fcd-4399-b42e-6d1e793f13b3 service nova] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Detach interface failed, port_id=958d4582-a277-49ee-b43b-f1f0b83217d0, reason: Instance 35f0c615-3e10-4bdf-aa8d-181f72c1c699 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1853.469473] env[63241]: DEBUG nova.compute.manager [req-5c51590d-d0a7-4782-80ea-aee1f587f948 req-54d6cf9a-28b6-4170-bcac-40c0289a5918 service nova] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Detach interface failed, port_id=1c88bbab-4bd7-4ea5-858c-317020381bac, reason: Instance 49d350ff-4932-4759-a3fa-53274c484ae6 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1853.851484] env[63241]: INFO nova.compute.manager [-] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Took 1.38 seconds to deallocate network for instance. [ 1853.863660] env[63241]: DEBUG oslo_concurrency.lockutils [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.863989] env[63241]: DEBUG oslo_concurrency.lockutils [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.864198] env[63241]: DEBUG nova.network.neutron [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1853.892790] env[63241]: INFO nova.compute.manager [-] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Took 1.41 seconds to deallocate network for instance. [ 1854.044376] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd44d080-cbb9-4b1a-ba99-60c80f968770 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.053517] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0411ac17-380c-4124-a647-fff7dc6f45a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.087219] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e11520-2721-4f46-9f33-20c8cb4dd5b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.098025] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7738a44-01e1-42c8-87d8-8f2b7639385d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.114614] env[63241]: DEBUG nova.compute.provider_tree [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1854.358355] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.403475] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.603105] env[63241]: INFO nova.network.neutron [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Port 55bc89c7-241d-48af-9915-9dd2f1afd2c0 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1854.604345] env[63241]: DEBUG nova.network.neutron [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.621934] env[63241]: DEBUG nova.scheduler.client.report [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1854.938812] env[63241]: DEBUG nova.compute.manager [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received event network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1854.939043] env[63241]: DEBUG nova.compute.manager [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c 
req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing instance network info cache due to event network-changed-6f97669d-a2c6-4625-a1b6-374f5565ebb0. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1854.939295] env[63241]: DEBUG oslo_concurrency.lockutils [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] Acquiring lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.996995] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-55bc89c7-241d-48af-9915-9dd2f1afd2c0" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.997295] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-55bc89c7-241d-48af-9915-9dd2f1afd2c0" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.997673] env[63241]: DEBUG nova.objects.instance [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'flavor' on Instance uuid d7d5b5a1-bfe9-43a1-b8f1-0a0048562530 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1855.106760] env[63241]: DEBUG oslo_concurrency.lockutils [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.108999] env[63241]: DEBUG oslo_concurrency.lockutils [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] Acquired lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.109236] env[63241]: DEBUG nova.network.neutron [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Refreshing network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1855.126790] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.326s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.127370] env[63241]: DEBUG nova.compute.manager [None 
req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1855.130131] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.772s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.130413] env[63241]: DEBUG nova.objects.instance [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lazy-loading 'resources' on Instance uuid 35f0c615-3e10-4bdf-aa8d-181f72c1c699 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1855.600387] env[63241]: DEBUG nova.objects.instance [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'pci_requests' on Instance uuid d7d5b5a1-bfe9-43a1-b8f1-0a0048562530 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1855.611761] env[63241]: DEBUG oslo_concurrency.lockutils [None req-267c6334-bc3d-46c9-8649-66e75bd6c88d tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-6b96988b-cc79-41d7-a17d-277ae5aeb4dc-55bc89c7-241d-48af-9915-9dd2f1afd2c0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 15.363s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.633389] env[63241]: DEBUG nova.compute.utils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1855.634762] env[63241]: DEBUG nova.compute.manager [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1855.634930] env[63241]: DEBUG nova.network.neutron [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1855.689283] env[63241]: DEBUG nova.policy [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ff916a4e9c6433e939d47c887fa3b98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bfa018174324b20863367a034d512da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1855.845768] env[63241]: DEBUG nova.network.neutron [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updated VIF entry in instance network info cache for port 6f97669d-a2c6-4625-a1b6-374f5565ebb0. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1855.846324] env[63241]: DEBUG nova.network.neutron [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [{"id": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "address": "fa:16:3e:0e:96:49", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f97669d-a2", "ovs_interfaceid": "6f97669d-a2c6-4625-a1b6-374f5565ebb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.872139] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b471a0d-a9bc-485b-b22f-152360e71bf5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.880920] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a20faf6-faf9-4ad2-9de3-3cce456f69e9 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.912880] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d00b41-171b-49d9-ba4d-1a34afd370cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.921095] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677225b8-d108-4cf3-bc7a-62908248dc9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.935809] env[63241]: DEBUG nova.compute.provider_tree [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1855.987942] env[63241]: DEBUG nova.network.neutron [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Successfully created port: 8a7a1e02-257b-4a7c-936b-c3d9f7596043 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1856.103240] env[63241]: DEBUG nova.objects.base [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1856.103447] env[63241]: DEBUG nova.network.neutron [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1856.141570] env[63241]: DEBUG nova.compute.manager [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1856.169450] env[63241]: DEBUG nova.policy [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54dc853b6f204a75ae7612f9fbd2d1f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ecafb0abbdc74501b22b20b797c4c60c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1856.350189] env[63241]: DEBUG oslo_concurrency.lockutils [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] Releasing lock "refresh_cache-6b96988b-cc79-41d7-a17d-277ae5aeb4dc" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.350667] env[63241]: DEBUG nova.compute.manager [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received event network-changed-db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1856.350966] env[63241]: DEBUG nova.compute.manager [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing instance network info cache due to event network-changed-db56e1be-f5b4-4531-8573-93fe90bc8b34. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1856.351339] env[63241]: DEBUG oslo_concurrency.lockutils [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] Acquiring lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.351653] env[63241]: DEBUG oslo_concurrency.lockutils [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] Acquired lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.351933] env[63241]: DEBUG nova.network.neutron [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing network info cache for port db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1856.439182] env[63241]: DEBUG nova.scheduler.client.report [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1856.943872] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.814s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.946188] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.543s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.946395] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.972617] env[63241]: INFO nova.scheduler.client.report [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Deleted allocations for instance 35f0c615-3e10-4bdf-aa8d-181f72c1c699 [ 1856.974431] env[63241]: INFO nova.scheduler.client.report [None 
req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Deleted allocations for instance 49d350ff-4932-4759-a3fa-53274c484ae6 [ 1857.020344] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "037f539f-1bf1-4897-81b3-08c377b92211" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.020612] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.020833] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "037f539f-1bf1-4897-81b3-08c377b92211-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.021022] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.021229] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.023317] env[63241]: INFO nova.compute.manager [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Terminating instance [ 1857.029189] env[63241]: DEBUG nova.compute.manager [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1857.029189] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1857.029189] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716cfe6f-c193-4c30-937a-671019e3e548 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.037189] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1857.037427] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edc606a5-3c1d-49da-b780-bf17aa2e4ae1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.046728] env[63241]: DEBUG oslo_vmware.api [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1857.046728] env[63241]: value = "task-1821110" [ 1857.046728] env[63241]: _type = "Task" [ 1857.046728] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.059365] env[63241]: DEBUG oslo_vmware.api [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.128581] env[63241]: DEBUG nova.network.neutron [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updated VIF entry in instance network info cache for port db56e1be-f5b4-4531-8573-93fe90bc8b34. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1857.128981] env[63241]: DEBUG nova.network.neutron [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updating instance_info_cache with network_info: [{"id": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "address": "fa:16:3e:9f:6a:40", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb56e1be-f5", "ovs_interfaceid": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.152370] env[63241]: DEBUG nova.compute.manager [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1857.182111] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1857.182398] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1857.182557] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1857.182744] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1857.182888] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1857.183046] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1857.183272] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1857.183434] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1857.183602] env[63241]: DEBUG nova.virt.hardware [None 
req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1857.183766] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1857.183943] env[63241]: DEBUG nova.virt.hardware [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1857.184849] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4637225-b927-477a-ab68-85a5a4f97ae9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.193980] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2c1ec8-0b91-4ba7-8b2f-82663b5b04d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.484061] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ac478f22-b35f-404c-b955-afbdebc104b0 tempest-DeleteServersTestJSON-1966896593 tempest-DeleteServersTestJSON-1966896593-project-member] Lock "49d350ff-4932-4759-a3fa-53274c484ae6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.651s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.487673] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f7a7999-ec45-41b6-9ea6-a92cb4537eea tempest-ImagesOneServerNegativeTestJSON-25052438 tempest-ImagesOneServerNegativeTestJSON-25052438-project-member] Lock "35f0c615-3e10-4bdf-aa8d-181f72c1c699" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.157s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.520823] env[63241]: DEBUG nova.compute.manager [req-440a7acd-2e88-4662-ba77-2a7ed487c157 req-47876c89-51ae-4e40-8afb-aeba4234617c service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Received event network-vif-plugged-8a7a1e02-257b-4a7c-936b-c3d9f7596043 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1857.521044] env[63241]: DEBUG oslo_concurrency.lockutils [req-440a7acd-2e88-4662-ba77-2a7ed487c157 req-47876c89-51ae-4e40-8afb-aeba4234617c service nova] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.521315] env[63241]: DEBUG oslo_concurrency.lockutils [req-440a7acd-2e88-4662-ba77-2a7ed487c157 req-47876c89-51ae-4e40-8afb-aeba4234617c service nova] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.521498] env[63241]: DEBUG oslo_concurrency.lockutils [req-440a7acd-2e88-4662-ba77-2a7ed487c157 req-47876c89-51ae-4e40-8afb-aeba4234617c service nova] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.521666] env[63241]: DEBUG nova.compute.manager [req-440a7acd-2e88-4662-ba77-2a7ed487c157 req-47876c89-51ae-4e40-8afb-aeba4234617c service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] No waiting events found dispatching network-vif-plugged-8a7a1e02-257b-4a7c-936b-c3d9f7596043 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1857.521827] env[63241]: WARNING nova.compute.manager [req-440a7acd-2e88-4662-ba77-2a7ed487c157 req-47876c89-51ae-4e40-8afb-aeba4234617c service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Received unexpected event network-vif-plugged-8a7a1e02-257b-4a7c-936b-c3d9f7596043 for instance with vm_state building and task_state spawning. [ 1857.559305] env[63241]: DEBUG oslo_vmware.api [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821110, 'name': PowerOffVM_Task, 'duration_secs': 0.250885} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.559521] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1857.560285] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1857.560285] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-039e8785-1b62-45be-8329-938d90aebfb2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.603181] env[63241]: DEBUG nova.compute.manager [req-1f695b0a-6719-4e54-8d00-c09a1f9affba req-1539a86e-5c59-4f7d-8442-aa69c77428a4 service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received event network-vif-plugged-55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1857.603400] env[63241]: DEBUG oslo_concurrency.lockutils [req-1f695b0a-6719-4e54-8d00-c09a1f9affba req-1539a86e-5c59-4f7d-8442-aa69c77428a4 service nova] Acquiring lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.603617] env[63241]: DEBUG oslo_concurrency.lockutils [req-1f695b0a-6719-4e54-8d00-c09a1f9affba req-1539a86e-5c59-4f7d-8442-aa69c77428a4 service nova] Lock 
"d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.603791] env[63241]: DEBUG oslo_concurrency.lockutils [req-1f695b0a-6719-4e54-8d00-c09a1f9affba req-1539a86e-5c59-4f7d-8442-aa69c77428a4 service nova] Lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.604039] env[63241]: DEBUG nova.compute.manager [req-1f695b0a-6719-4e54-8d00-c09a1f9affba req-1539a86e-5c59-4f7d-8442-aa69c77428a4 service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] No waiting events found dispatching network-vif-plugged-55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1857.604245] env[63241]: WARNING nova.compute.manager [req-1f695b0a-6719-4e54-8d00-c09a1f9affba req-1539a86e-5c59-4f7d-8442-aa69c77428a4 service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received unexpected event network-vif-plugged-55bc89c7-241d-48af-9915-9dd2f1afd2c0 for instance with vm_state active and task_state None. [ 1857.618382] env[63241]: DEBUG nova.network.neutron [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Successfully updated port: 8a7a1e02-257b-4a7c-936b-c3d9f7596043 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1857.631357] env[63241]: DEBUG oslo_concurrency.lockutils [req-72c01c0a-fbc9-451b-b405-cae46e76fc7c req-a0dadb18-ebde-4b5e-8ab5-41ef26519b05 service nova] Releasing lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.664468] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1857.664711] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1857.664913] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Deleting the datastore file [datastore1] 037f539f-1bf1-4897-81b3-08c377b92211 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1857.665216] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58b55efe-cc81-4fe2-bcfb-3fb98d1a7142 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.678515] env[63241]: DEBUG oslo_vmware.api [None 
req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for the task: (returnval){ [ 1857.678515] env[63241]: value = "task-1821112" [ 1857.678515] env[63241]: _type = "Task" [ 1857.678515] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.689918] env[63241]: DEBUG oslo_vmware.api [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821112, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.768684] env[63241]: DEBUG nova.network.neutron [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Successfully updated port: 55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1858.121441] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "refresh_cache-c0ea8cf6-4023-4093-b0bc-67b02604125a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.121694] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired lock "refresh_cache-c0ea8cf6-4023-4093-b0bc-67b02604125a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.121935] env[63241]: DEBUG nova.network.neutron [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1858.190183] env[63241]: DEBUG oslo_vmware.api [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Task: {'id': task-1821112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152843} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.190519] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1858.190716] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1858.190820] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1858.191010] env[63241]: INFO nova.compute.manager [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1858.191264] env[63241]: DEBUG oslo.service.loopingcall [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1858.191462] env[63241]: DEBUG nova.compute.manager [-] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1858.191558] env[63241]: DEBUG nova.network.neutron [-] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1858.272733] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.272932] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.273142] env[63241]: DEBUG nova.network.neutron [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1858.396727] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "e28ba013-0bc5-4edc-858d-674980bc8742" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.397021] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.397262] env[63241]: INFO nova.compute.manager [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Rebooting instance [ 1858.666492] env[63241]: DEBUG nova.network.neutron [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1858.913990] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.914193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.914490] env[63241]: DEBUG nova.network.neutron [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1859.003632] env[63241]: WARNING nova.network.neutron [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] cafd3d43-975a-4836-8948-2f1b47e56666 already exists in list: networks containing: ['cafd3d43-975a-4836-8948-2f1b47e56666']. ignoring it [ 1859.248958] env[63241]: DEBUG nova.network.neutron [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Updating instance_info_cache with network_info: [{"id": "8a7a1e02-257b-4a7c-936b-c3d9f7596043", "address": "fa:16:3e:fd:e9:66", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a7a1e02-25", "ovs_interfaceid": "8a7a1e02-257b-4a7c-936b-c3d9f7596043", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.321931] env[63241]: DEBUG nova.network.neutron [-] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.555607] env[63241]: DEBUG nova.compute.manager [req-818bf500-96c3-42e7-91e7-3bb785c5cfc4 
req-8e73cce9-84a7-4b1b-9f18-29ec1f19f90e service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Received event network-changed-8a7a1e02-257b-4a7c-936b-c3d9f7596043 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1859.555812] env[63241]: DEBUG nova.compute.manager [req-818bf500-96c3-42e7-91e7-3bb785c5cfc4 req-8e73cce9-84a7-4b1b-9f18-29ec1f19f90e service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Refreshing instance network info cache due to event network-changed-8a7a1e02-257b-4a7c-936b-c3d9f7596043. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1859.555998] env[63241]: DEBUG oslo_concurrency.lockutils [req-818bf500-96c3-42e7-91e7-3bb785c5cfc4 req-8e73cce9-84a7-4b1b-9f18-29ec1f19f90e service nova] Acquiring lock "refresh_cache-c0ea8cf6-4023-4093-b0bc-67b02604125a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.635339] env[63241]: DEBUG nova.compute.manager [req-ddf785bd-8d7e-46b3-97aa-0620a22d2870 req-47fe4d35-c363-40ba-9ceb-63392b0e8b4b service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received event network-changed-55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1859.635339] env[63241]: DEBUG nova.compute.manager [req-ddf785bd-8d7e-46b3-97aa-0620a22d2870 req-47fe4d35-c363-40ba-9ceb-63392b0e8b4b service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing instance network info cache due to event network-changed-55bc89c7-241d-48af-9915-9dd2f1afd2c0. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1859.635539] env[63241]: DEBUG oslo_concurrency.lockutils [req-ddf785bd-8d7e-46b3-97aa-0620a22d2870 req-47fe4d35-c363-40ba-9ceb-63392b0e8b4b service nova] Acquiring lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.683586] env[63241]: DEBUG nova.network.neutron [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updating instance_info_cache with network_info: [{"id": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "address": "fa:16:3e:9f:6a:40", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb56e1be-f5", "ovs_interfaceid": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55bc89c7-241d-48af-9915-9dd2f1afd2c0", "address": "fa:16:3e:75:b6:69", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55bc89c7-24", "ovs_interfaceid": "55bc89c7-241d-48af-9915-9dd2f1afd2c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.753611] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Releasing lock "refresh_cache-c0ea8cf6-4023-4093-b0bc-67b02604125a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.754025] env[63241]: DEBUG nova.compute.manager [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Instance network_info: |[{"id": "8a7a1e02-257b-4a7c-936b-c3d9f7596043", "address": "fa:16:3e:fd:e9:66", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a7a1e02-25", "ovs_interfaceid": "8a7a1e02-257b-4a7c-936b-c3d9f7596043", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1859.754348] env[63241]: DEBUG oslo_concurrency.lockutils [req-818bf500-96c3-42e7-91e7-3bb785c5cfc4 req-8e73cce9-84a7-4b1b-9f18-29ec1f19f90e service nova] Acquired lock "refresh_cache-c0ea8cf6-4023-4093-b0bc-67b02604125a" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.754537] env[63241]: DEBUG nova.network.neutron [req-818bf500-96c3-42e7-91e7-3bb785c5cfc4 req-8e73cce9-84a7-4b1b-9f18-29ec1f19f90e service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Refreshing network info cache for port 8a7a1e02-257b-4a7c-936b-c3d9f7596043 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1859.755822] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:e9:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a7a1e02-257b-4a7c-936b-c3d9f7596043', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1859.767772] env[63241]: DEBUG oslo.service.loopingcall [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1859.769046] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1859.769576] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4689b340-ab35-47cb-a554-095e15340e7e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.786836] env[63241]: DEBUG nova.network.neutron [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1859.796558] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1859.796558] env[63241]: value = "task-1821114" [ 1859.796558] env[63241]: _type = "Task" [ 1859.796558] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.806725] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821114, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.824282] env[63241]: INFO nova.compute.manager [-] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Took 1.63 seconds to deallocate network for instance. [ 1860.186450] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.187322] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.187416] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.187678] env[63241]: DEBUG oslo_concurrency.lockutils [req-ddf785bd-8d7e-46b3-97aa-0620a22d2870 req-47fe4d35-c363-40ba-9ceb-63392b0e8b4b service nova] Acquired lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.187862] env[63241]: DEBUG nova.network.neutron [req-ddf785bd-8d7e-46b3-97aa-0620a22d2870 req-47fe4d35-c363-40ba-9ceb-63392b0e8b4b service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Refreshing network info cache for port 55bc89c7-241d-48af-9915-9dd2f1afd2c0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1860.190222] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98cb48c2-292e-4fe2-ad68-b27d17b627ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.208254] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1860.208493] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1860.208661] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1860.208875] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1860.209035] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1860.209186] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1860.209393] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1860.209710] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1860.209835] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1860.209919] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1860.210249] env[63241]: DEBUG nova.virt.hardware [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 
tempest-AttachInterfacesTestJSON-1054157255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1860.216711] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Reconfiguring VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1860.218135] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94726596-e2e8-4307-84e5-55f0e0ab8c77 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.237130] env[63241]: DEBUG oslo_vmware.api [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1860.237130] env[63241]: value = "task-1821115" [ 1860.237130] env[63241]: _type = "Task" [ 1860.237130] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.247070] env[63241]: DEBUG oslo_vmware.api [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821115, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.291622] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.292820] env[63241]: DEBUG nova.compute.manager [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1860.294084] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f3bb0e-bead-4acc-80bd-96d167e32a9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.308917] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821114, 'name': CreateVM_Task, 'duration_secs': 0.419108} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.313830] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1860.318607] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.318607] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.318607] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1860.320152] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c99ff177-5992-47cd-87fc-841c8f37f4c6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.328704] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1860.328704] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c90301-05d5-f87a-bbd6-78e961dbd561" [ 1860.328704] env[63241]: _type = "Task" [ 1860.328704] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.334597] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.335657] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.335657] env[63241]: DEBUG nova.objects.instance [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lazy-loading 'resources' on Instance uuid 037f539f-1bf1-4897-81b3-08c377b92211 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1860.353367] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c90301-05d5-f87a-bbd6-78e961dbd561, 'name': SearchDatastore_Task, 'duration_secs': 0.020777} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.353700] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.353927] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1860.354182] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.354377] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.354550] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1860.354937] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9298465f-0324-44fb-8a45-3a0c9a2866f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.368063] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1860.368453] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1860.369638] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beaccb02-10d8-4a72-8127-0cd51f3adb83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.377209] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1860.377209] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520f3d6c-3e3c-4081-e709-95069d83590d" [ 1860.377209] env[63241]: _type = "Task" [ 1860.377209] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.385829] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520f3d6c-3e3c-4081-e709-95069d83590d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.620724] env[63241]: DEBUG nova.network.neutron [req-818bf500-96c3-42e7-91e7-3bb785c5cfc4 req-8e73cce9-84a7-4b1b-9f18-29ec1f19f90e service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Updated VIF entry in instance network info cache for port 8a7a1e02-257b-4a7c-936b-c3d9f7596043. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1860.621118] env[63241]: DEBUG nova.network.neutron [req-818bf500-96c3-42e7-91e7-3bb785c5cfc4 req-8e73cce9-84a7-4b1b-9f18-29ec1f19f90e service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Updating instance_info_cache with network_info: [{"id": "8a7a1e02-257b-4a7c-936b-c3d9f7596043", "address": "fa:16:3e:fd:e9:66", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a7a1e02-25", "ovs_interfaceid": "8a7a1e02-257b-4a7c-936b-c3d9f7596043", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.747957] env[63241]: DEBUG oslo_vmware.api [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821115, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.896324] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520f3d6c-3e3c-4081-e709-95069d83590d, 'name': SearchDatastore_Task, 'duration_secs': 0.010001} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.897557] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5aba2fe-abee-40df-8560-8df0c6c7de60 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.907015] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1860.907015] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523e6eb3-9cd0-2d3a-c9ea-fa8ae10f1afc" [ 1860.907015] env[63241]: _type = "Task" [ 1860.907015] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.923025] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523e6eb3-9cd0-2d3a-c9ea-fa8ae10f1afc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.973243] env[63241]: DEBUG nova.network.neutron [req-ddf785bd-8d7e-46b3-97aa-0620a22d2870 req-47fe4d35-c363-40ba-9ceb-63392b0e8b4b service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updated VIF entry in instance network info cache for port 55bc89c7-241d-48af-9915-9dd2f1afd2c0. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1860.973659] env[63241]: DEBUG nova.network.neutron [req-ddf785bd-8d7e-46b3-97aa-0620a22d2870 req-47fe4d35-c363-40ba-9ceb-63392b0e8b4b service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updating instance_info_cache with network_info: [{"id": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "address": "fa:16:3e:9f:6a:40", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb56e1be-f5", "ovs_interfaceid": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55bc89c7-241d-48af-9915-9dd2f1afd2c0", "address": "fa:16:3e:75:b6:69", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55bc89c7-24", "ovs_interfaceid": "55bc89c7-241d-48af-9915-9dd2f1afd2c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.068451] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d4696d-a492-4a91-893e-67214e42b765 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.076896] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4ca285-86f9-4696-a26a-8f6297f35aef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.110680] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4236ee01-3a3c-4aac-ad02-519931a53255 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.118875] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd54f769-6c4e-4349-a65e-9941dbf160ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.123517] env[63241]: DEBUG oslo_concurrency.lockutils [req-818bf500-96c3-42e7-91e7-3bb785c5cfc4 req-8e73cce9-84a7-4b1b-9f18-29ec1f19f90e service nova] Releasing lock "refresh_cache-c0ea8cf6-4023-4093-b0bc-67b02604125a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.134169] env[63241]: DEBUG nova.compute.provider_tree [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.248263] env[63241]: DEBUG oslo_vmware.api [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821115, 'name': ReconfigVM_Task, 'duration_secs': 0.674404} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.248783] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.249192] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Reconfigured VM to attach interface {{(pid=63241) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1861.327150] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c4dcd6-c3e9-40b5-aa37-ef27b06f1c92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.337093] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Doing hard reboot of VM {{(pid=63241) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1861.337363] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-cc65c208-7b78-4f17-85c3-d9a4cddbc1a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.344264] env[63241]: DEBUG oslo_vmware.api [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1861.344264] env[63241]: value = "task-1821116" [ 1861.344264] env[63241]: _type = "Task" [ 1861.344264] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.352594] env[63241]: DEBUG oslo_vmware.api [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821116, 'name': ResetVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.503466] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523e6eb3-9cd0-2d3a-c9ea-fa8ae10f1afc, 'name': SearchDatastore_Task, 'duration_secs': 0.021421} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.503466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.503466] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c0ea8cf6-4023-4093-b0bc-67b02604125a/c0ea8cf6-4023-4093-b0bc-67b02604125a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1861.503466] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa78a356-a84e-4c2a-a533-05161f4304e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.503466] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1861.503466] env[63241]: value = "task-1821117" [ 1861.503466] env[63241]: _type = "Task" [ 1861.503466] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.503466] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821117, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.503466] env[63241]: DEBUG oslo_concurrency.lockutils [req-ddf785bd-8d7e-46b3-97aa-0620a22d2870 req-47fe4d35-c363-40ba-9ceb-63392b0e8b4b service nova] Releasing lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.503466] env[63241]: DEBUG nova.compute.manager [req-ddf785bd-8d7e-46b3-97aa-0620a22d2870 req-47fe4d35-c363-40ba-9ceb-63392b0e8b4b service nova] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Received event network-vif-deleted-f206ebca-5602-446b-aa53-e4a3d5686739 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1861.636960] env[63241]: DEBUG nova.scheduler.client.report [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1861.754779] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbd88323-0452-40c4-91a9-587a8e77bd59 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-55bc89c7-241d-48af-9915-9dd2f1afd2c0" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.757s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.859023] env[63241]: DEBUG oslo_vmware.api [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821116, 'name': ResetVM_Task, 'duration_secs': 0.1034} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.859023] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Did hard reboot of VM {{(pid=63241) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1861.859023] env[63241]: DEBUG nova.compute.manager [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1861.860740] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8030a04-c068-4549-abdd-628bea7b4471 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.949888] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821117, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.143091] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.808s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.170977] env[63241]: INFO nova.scheduler.client.report [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Deleted allocations for instance 037f539f-1bf1-4897-81b3-08c377b92211 [ 1862.376210] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bd5bcbb8-138f-48d7-b73a-3393e4131b53 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.979s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.454584] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821117, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582767} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.457018] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c0ea8cf6-4023-4093-b0bc-67b02604125a/c0ea8cf6-4023-4093-b0bc-67b02604125a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1862.457018] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1862.457018] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96e759fc-674c-4c57-9314-6fb418c978da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.464988] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1862.464988] env[63241]: value = "task-1821118" [ 1862.464988] env[63241]: _type = "Task" [ 1862.464988] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.476020] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821118, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.680484] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3c1c10d1-2ba8-496c-b6d3-e3482c46f43a tempest-VolumesAdminNegativeTest-1637523670 tempest-VolumesAdminNegativeTest-1637523670-project-member] Lock "037f539f-1bf1-4897-81b3-08c377b92211" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.659s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.977241] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821118, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069899} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.981506] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1862.982426] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f0914f-a8f9-4be0-a252-7b9d2ffdea2e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.007461] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] c0ea8cf6-4023-4093-b0bc-67b02604125a/c0ea8cf6-4023-4093-b0bc-67b02604125a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1863.007797] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10a095ac-3225-48bc-85f0-5641e41a41d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.037620] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1863.037620] env[63241]: value = "task-1821119" [ 1863.037620] env[63241]: _type = "Task" [ 1863.037620] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.047831] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821119, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.297906] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "interface-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-55bc89c7-241d-48af-9915-9dd2f1afd2c0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.298129] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-55bc89c7-241d-48af-9915-9dd2f1afd2c0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.549961] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821119, 'name': ReconfigVM_Task, 'duration_secs': 0.39724} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.550618] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfigured VM instance instance-0000006a to attach disk [datastore1] c0ea8cf6-4023-4093-b0bc-67b02604125a/c0ea8cf6-4023-4093-b0bc-67b02604125a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1863.554330] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-beb9c7a7-1303-49f0-89b9-8df8658a2525 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.560667] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1863.560667] env[63241]: value = "task-1821120" [ 1863.560667] env[63241]: _type = "Task" [ 1863.560667] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.576711] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821120, 'name': Rename_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.801924] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.802176] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.803080] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1d73b1-d35b-4831-9360-97f933f04e9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.828865] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656d19a5-f043-4a3a-98c6-601206911505 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.861105] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Reconfiguring VM to detach interface {{(pid=63241) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1863.861518] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3585b38f-d675-4d7e-b1fb-ef052b03feb5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.882613] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1863.882613] env[63241]: value = "task-1821121" [ 1863.882613] env[63241]: _type = "Task" [ 1863.882613] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.892149] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.073250] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821120, 'name': Rename_Task, 'duration_secs': 0.150401} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.073572] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1864.073834] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-244830bf-18e8-4914-a24a-09da14ee17b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.082735] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1864.082735] env[63241]: value = "task-1821122" [ 1864.082735] env[63241]: _type = "Task" [ 1864.082735] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.101983] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821122, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.396373] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.593750] env[63241]: DEBUG oslo_vmware.api [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821122, 'name': PowerOnVM_Task, 'duration_secs': 0.476198} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.594492] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1864.594492] env[63241]: INFO nova.compute.manager [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Took 7.44 seconds to spawn the instance on the hypervisor. 
[ 1864.594492] env[63241]: DEBUG nova.compute.manager [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1864.595249] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea40d25-a256-47fa-8d95-59d6ca378fb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.896526] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.119543] env[63241]: INFO nova.compute.manager [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Took 16.34 seconds to build instance. [ 1865.396331] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.623982] env[63241]: DEBUG oslo_concurrency.lockutils [None req-760c7dc7-e0d8-4a9f-bc4a-e6e59a51ae2c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.853s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.828301] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "1e172f73-972e-4401-b358-512f7e03b27f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.828542] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.896236] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.335080] env[63241]: INFO nova.compute.manager [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Detaching volume 8530001a-3c79-454f-a061-a6f03dce1921 [ 1866.387538] env[63241]: INFO nova.virt.block_device [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Attempting to driver detach volume 8530001a-3c79-454f-a061-a6f03dce1921 from mountpoint /dev/sdb [ 1866.387928] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Volume detach. Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1866.388272] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377189', 'volume_id': '8530001a-3c79-454f-a061-a6f03dce1921', 'name': 'volume-8530001a-3c79-454f-a061-a6f03dce1921', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1e172f73-972e-4401-b358-512f7e03b27f', 'attached_at': '', 'detached_at': '', 'volume_id': '8530001a-3c79-454f-a061-a6f03dce1921', 'serial': '8530001a-3c79-454f-a061-a6f03dce1921'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1866.390075] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2866731-70ab-47d8-bed7-c81fb59be28b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.434338] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55dc334-90a7-4f0f-8663-87256286267a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.438526] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.444584] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447b6bee-5bf0-40ce-8ddb-ea1332e38aed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.473749] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb682c18-ee98-40be-ba0e-8316cb68aa9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.478603] env[63241]: DEBUG nova.compute.manager [req-d5598633-66d0-4d85-a946-57d8ceb3a1a6 req-e2902d1a-c995-4364-b90a-3b5115526816 service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Received event network-changed-8a7a1e02-257b-4a7c-936b-c3d9f7596043 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1866.478840] env[63241]: DEBUG nova.compute.manager [req-d5598633-66d0-4d85-a946-57d8ceb3a1a6 req-e2902d1a-c995-4364-b90a-3b5115526816 service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Refreshing instance network info cache due to event network-changed-8a7a1e02-257b-4a7c-936b-c3d9f7596043. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1866.479069] env[63241]: DEBUG oslo_concurrency.lockutils [req-d5598633-66d0-4d85-a946-57d8ceb3a1a6 req-e2902d1a-c995-4364-b90a-3b5115526816 service nova] Acquiring lock "refresh_cache-c0ea8cf6-4023-4093-b0bc-67b02604125a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.479247] env[63241]: DEBUG oslo_concurrency.lockutils [req-d5598633-66d0-4d85-a946-57d8ceb3a1a6 req-e2902d1a-c995-4364-b90a-3b5115526816 service nova] Acquired lock "refresh_cache-c0ea8cf6-4023-4093-b0bc-67b02604125a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.479425] env[63241]: DEBUG nova.network.neutron [req-d5598633-66d0-4d85-a946-57d8ceb3a1a6 req-e2902d1a-c995-4364-b90a-3b5115526816 service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Refreshing network info cache for port 8a7a1e02-257b-4a7c-936b-c3d9f7596043 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1866.498352] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] The volume has not been displaced from its original location: [datastore1] volume-8530001a-3c79-454f-a061-a6f03dce1921/volume-8530001a-3c79-454f-a061-a6f03dce1921.vmdk. No consolidation needed. 
{{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1866.505315] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfiguring VM instance instance-0000004e to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1866.506353] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d15f5f2d-3ab7-4648-ae55-a92a7f38bd01 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.531153] env[63241]: DEBUG oslo_vmware.api [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1866.531153] env[63241]: value = "task-1821123" [ 1866.531153] env[63241]: _type = "Task" [ 1866.531153] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.541825] env[63241]: DEBUG oslo_vmware.api [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821123, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.904539] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.045760] env[63241]: DEBUG oslo_vmware.api [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821123, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.407877] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.538218] env[63241]: DEBUG nova.network.neutron [req-d5598633-66d0-4d85-a946-57d8ceb3a1a6 req-e2902d1a-c995-4364-b90a-3b5115526816 service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Updated VIF entry in instance network info cache for port 8a7a1e02-257b-4a7c-936b-c3d9f7596043. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1867.538983] env[63241]: DEBUG nova.network.neutron [req-d5598633-66d0-4d85-a946-57d8ceb3a1a6 req-e2902d1a-c995-4364-b90a-3b5115526816 service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Updating instance_info_cache with network_info: [{"id": "8a7a1e02-257b-4a7c-936b-c3d9f7596043", "address": "fa:16:3e:fd:e9:66", "network": {"id": "a8a8022c-fd6d-4145-af3c-875a85e306e6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-472658781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4bfa018174324b20863367a034d512da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a7a1e02-25", "ovs_interfaceid": "8a7a1e02-257b-4a7c-936b-c3d9f7596043", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.554174] env[63241]: DEBUG oslo_vmware.api [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821123, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.904023] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.046798] env[63241]: DEBUG oslo_concurrency.lockutils [req-d5598633-66d0-4d85-a946-57d8ceb3a1a6 req-e2902d1a-c995-4364-b90a-3b5115526816 service nova] Releasing lock "refresh_cache-c0ea8cf6-4023-4093-b0bc-67b02604125a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.053980] env[63241]: DEBUG oslo_vmware.api [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821123, 'name': ReconfigVM_Task, 'duration_secs': 1.293151} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.054437] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Reconfigured VM instance instance-0000004e to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1868.059917] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fe81003-7c95-41ca-a0b3-95c6a6381d45 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.079026] env[63241]: DEBUG oslo_vmware.api [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1868.079026] env[63241]: value = "task-1821124" [ 1868.079026] env[63241]: _type = "Task" [ 1868.079026] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.088215] env[63241]: DEBUG oslo_vmware.api [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821124, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.401941] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.589135] env[63241]: DEBUG oslo_vmware.api [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821124, 'name': ReconfigVM_Task, 'duration_secs': 0.192152} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.590816] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377189', 'volume_id': '8530001a-3c79-454f-a061-a6f03dce1921', 'name': 'volume-8530001a-3c79-454f-a061-a6f03dce1921', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1e172f73-972e-4401-b358-512f7e03b27f', 'attached_at': '', 'detached_at': '', 'volume_id': '8530001a-3c79-454f-a061-a6f03dce1921', 'serial': '8530001a-3c79-454f-a061-a6f03dce1921'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1868.902063] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.147747] env[63241]: DEBUG nova.objects.instance [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lazy-loading 'flavor' on Instance uuid 1e172f73-972e-4401-b358-512f7e03b27f {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.404274] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.463552] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "98e7f776-b36e-4132-803e-f2272e26c44e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.463796] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "98e7f776-b36e-4132-803e-f2272e26c44e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.500894] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "bf339484-4b96-4326-b035-39783aff4461" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.501143] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "bf339484-4b96-4326-b035-39783aff4461" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.903041] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.966282] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1870.003823] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1870.157781] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ef5ab6a6-c69f-4853-8438-3aa917940346 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.329s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.358184] env[63241]: DEBUG oslo_concurrency.lockutils [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "1e172f73-972e-4401-b358-512f7e03b27f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.358517] env[63241]: DEBUG oslo_concurrency.lockutils [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.358706] env[63241]: DEBUG oslo_concurrency.lockutils [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "1e172f73-972e-4401-b358-512f7e03b27f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.358912] env[63241]: DEBUG oslo_concurrency.lockutils [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.359103] env[63241]: DEBUG oslo_concurrency.lockutils [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.361356] env[63241]: INFO nova.compute.manager [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Terminating 
instance [ 1870.363147] env[63241]: DEBUG nova.compute.manager [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1870.363356] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1870.364249] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e32fa14-5e14-4ebf-84ec-9dd044510b34 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.372722] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1870.372992] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5178f8c9-df27-4778-8c79-335577dc2525 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.380758] env[63241]: DEBUG oslo_vmware.api [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1870.380758] env[63241]: value = "task-1821125" [ 1870.380758] env[63241]: _type = "Task" [ 1870.380758] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.388462] env[63241]: DEBUG oslo_vmware.api [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.402432] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.489979] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.490311] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.492027] env[63241]: INFO nova.compute.claims [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1870.527304] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.891732] env[63241]: DEBUG oslo_vmware.api [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821125, 'name': PowerOffVM_Task, 'duration_secs': 0.170406} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.892019] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1870.892202] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1870.892457] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd0261c9-0f46-4156-be8b-4db40314ab56 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.902759] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.405545] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.670295] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479b84d6-c20b-42d0-974c-67ba7e2e52bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.677874] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5e08df-90f5-45e2-a6c9-6181f0159588 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.707910] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d9ac5d-9f46-4190-9af9-d8ee94e49dac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.715361] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bba9c26-df81-4efa-b6c1-63aa0a695423 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.728893] env[63241]: DEBUG nova.compute.provider_tree [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1871.905205] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.232905] env[63241]: DEBUG nova.scheduler.client.report [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1872.245199] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "6f3cbd23-30b9-4502-be07-2edd0a701291" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.245507] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "6f3cbd23-30b9-4502-be07-2edd0a701291" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.245786] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "6f3cbd23-30b9-4502-be07-2edd0a701291-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.246256] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "6f3cbd23-30b9-4502-be07-2edd0a701291-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.246256] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "6f3cbd23-30b9-4502-be07-2edd0a701291-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.248679] env[63241]: INFO nova.compute.manager [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Terminating instance [ 1872.250936] env[63241]: DEBUG nova.compute.manager [None 
req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1872.251202] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1872.252373] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde111b0-c707-471a-9d2b-b3bab8001d25 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.264699] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1872.265012] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ebf132f-2971-42e9-8625-f7f5abf16d59 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.271971] env[63241]: DEBUG oslo_vmware.api [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1872.271971] env[63241]: value = "task-1821127" [ 1872.271971] env[63241]: _type = "Task" [ 1872.271971] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.282084] env[63241]: DEBUG oslo_vmware.api [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821127, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.406167] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.739797] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.740306] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1872.743196] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.216s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.744626] env[63241]: INFO nova.compute.claims [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1872.783386] env[63241]: DEBUG oslo_vmware.api [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821127, 'name': PowerOffVM_Task, 'duration_secs': 0.194653} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.783637] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1872.783806] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1872.784058] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d782add3-0c99-4c4d-aaf1-f92364029e5b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.906330] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.249234] env[63241]: DEBUG nova.compute.utils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1873.253025] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1873.253025] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1873.320071] env[63241]: DEBUG nova.policy [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ed069bdb22e40409ad6e3ea2da9dd8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4642d232d037477ba8813b56e579d84f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1873.407984] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.573191] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Successfully created port: 58c509e6-4c6b-4a29-9906-ff3258989040 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1873.753990] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1873.907329] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.935442] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbee163b-e11b-4277-af4f-219924f58158 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.943287] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563550eb-19d1-416b-9408-bf1b7e269462 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.974570] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7d1bb0-352c-4c8f-9969-0514068db439 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.982347] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9468f6df-202e-4715-84db-4e8f116f6b56 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.995428] env[63241]: DEBUG nova.compute.provider_tree [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1874.229108] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1874.229397] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1874.229559] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleting the datastore file [datastore1] 6f3cbd23-30b9-4502-be07-2edd0a701291 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1874.229836] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-607a15c2-52c7-4e2a-b240-44a207760ba8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.236389] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1874.236590] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 
tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1874.236770] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Deleting the datastore file [datastore1] 1e172f73-972e-4401-b358-512f7e03b27f {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1874.237026] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-941b2d59-425f-409a-b4d2-b61037d21d34 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.240467] env[63241]: DEBUG oslo_vmware.api [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1874.240467] env[63241]: value = "task-1821129" [ 1874.240467] env[63241]: _type = "Task" [ 1874.240467] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.244747] env[63241]: DEBUG oslo_vmware.api [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1874.244747] env[63241]: value = "task-1821130" [ 1874.244747] env[63241]: _type = "Task" [ 1874.244747] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.251586] env[63241]: DEBUG oslo_vmware.api [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.255723] env[63241]: DEBUG oslo_vmware.api [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821130, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.409358] env[63241]: DEBUG oslo_vmware.api [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821121, 'name': ReconfigVM_Task, 'duration_secs': 10.477573} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.409739] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.409831] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Reconfigured VM to detach interface {{(pid=63241) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1874.498295] env[63241]: DEBUG nova.scheduler.client.report [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1874.751620] env[63241]: DEBUG oslo_vmware.api [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141563} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.752281] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1874.752480] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1874.752659] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1874.752832] env[63241]: INFO nova.compute.manager [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Took 2.50 seconds to destroy the instance on the hypervisor. 
[ 1874.753075] env[63241]: DEBUG oslo.service.loopingcall [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1874.753265] env[63241]: DEBUG nova.compute.manager [-] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1874.753358] env[63241]: DEBUG nova.network.neutron [-] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1874.757719] env[63241]: DEBUG oslo_vmware.api [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821130, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155231} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.758219] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1874.758395] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1874.758570] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1874.758738] env[63241]: INFO nova.compute.manager [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Took 4.40 seconds to destroy the instance on the hypervisor. [ 1874.758952] env[63241]: DEBUG oslo.service.loopingcall [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1874.759138] env[63241]: DEBUG nova.compute.manager [-] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1874.759227] env[63241]: DEBUG nova.network.neutron [-] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1874.767639] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1874.797360] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1874.797734] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1874.797841] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1874.797970] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1874.798154] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1874.798307] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1874.798654] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1874.798725] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1874.798838] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1874.799013] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1874.799210] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1874.800091] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641a8d4e-6686-47d1-b71f-eceb2b7a231a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.808048] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd761c0-1931-47cc-b7d1-0678ec438acc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.004212] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.261s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.005197] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1875.206211] env[63241]: DEBUG nova.compute.manager [req-c50a1078-d703-44b6-876c-2e67f689c3f5 req-955b04e2-e4b9-40b8-848a-ccdae8b398ba service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Received event network-vif-deleted-240bd985-f430-47f9-83a8-287f0c345a36 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1875.206516] env[63241]: INFO nova.compute.manager [req-c50a1078-d703-44b6-876c-2e67f689c3f5 req-955b04e2-e4b9-40b8-848a-ccdae8b398ba service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Neutron deleted interface 240bd985-f430-47f9-83a8-287f0c345a36; detaching it from the instance and deleting it from the info cache [ 1875.206736] env[63241]: DEBUG nova.network.neutron [req-c50a1078-d703-44b6-876c-2e67f689c3f5 req-955b04e2-e4b9-40b8-848a-ccdae8b398ba service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.252808] env[63241]: DEBUG nova.compute.manager [req-ad83eff6-8901-4aee-9ef2-5c9fcf1a8bf6 req-a00de40c-687f-45a6-bf83-34aba4314056 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Received event network-vif-deleted-fbeb829e-4c31-429b-bdb0-ecb7331ef4ea {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1875.253095] env[63241]: INFO nova.compute.manager [req-ad83eff6-8901-4aee-9ef2-5c9fcf1a8bf6 req-a00de40c-687f-45a6-bf83-34aba4314056 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Neutron deleted interface fbeb829e-4c31-429b-bdb0-ecb7331ef4ea; detaching it from the instance and deleting it from the info cache [ 1875.253274] env[63241]: DEBUG nova.network.neutron [req-ad83eff6-8901-4aee-9ef2-5c9fcf1a8bf6 req-a00de40c-687f-45a6-bf83-34aba4314056 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.510616] env[63241]: DEBUG nova.compute.utils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1875.513494] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1875.513698] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1875.558139] env[63241]: DEBUG nova.policy [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ed069bdb22e40409ad6e3ea2da9dd8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4642d232d037477ba8813b56e579d84f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1875.573198] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Successfully updated port: 58c509e6-4c6b-4a29-9906-ff3258989040 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1875.683618] env[63241]: DEBUG nova.network.neutron [-] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.705262] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.705262] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquired lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.705393] env[63241]: DEBUG nova.network.neutron [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1875.709048] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b124d33-f2e0-43b8-acf9-113414329f11 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.719456] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99e1aef-ba0c-4c89-a22b-3115ddb33dc5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.729915] env[63241]: DEBUG 
nova.network.neutron [-] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.753492] env[63241]: DEBUG nova.compute.manager [req-c50a1078-d703-44b6-876c-2e67f689c3f5 req-955b04e2-e4b9-40b8-848a-ccdae8b398ba service nova] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Detach interface failed, port_id=240bd985-f430-47f9-83a8-287f0c345a36, reason: Instance 6f3cbd23-30b9-4502-be07-2edd0a701291 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1875.755522] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2ef86d6-5548-4f19-8a0f-0d05a3b02fd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.765017] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54390d47-5dab-4308-b545-b8b76b20c70e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.796133] env[63241]: DEBUG nova.compute.manager [req-ad83eff6-8901-4aee-9ef2-5c9fcf1a8bf6 req-a00de40c-687f-45a6-bf83-34aba4314056 service nova] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Detach interface failed, port_id=fbeb829e-4c31-429b-bdb0-ecb7331ef4ea, reason: Instance 1e172f73-972e-4401-b358-512f7e03b27f could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1875.822664] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Successfully created port: 5c215984-3f1e-41db-966c-17d3c097a862 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1876.014900] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1876.076637] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "refresh_cache-98e7f776-b36e-4132-803e-f2272e26c44e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.076800] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "refresh_cache-98e7f776-b36e-4132-803e-f2272e26c44e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.076954] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1876.083467] env[63241]: DEBUG oslo_concurrency.lockutils [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.083955] env[63241]: DEBUG oslo_concurrency.lockutils [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.083955] env[63241]: DEBUG oslo_concurrency.lockutils [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.084107] env[63241]: DEBUG oslo_concurrency.lockutils [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.084283] env[63241]: DEBUG oslo_concurrency.lockutils [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1876.086368] env[63241]: INFO nova.compute.manager [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Terminating instance [ 1876.088052] env[63241]: DEBUG nova.compute.manager [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1876.088250] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1876.089145] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb09c2f4-f521-4890-a3b6-c891743b5e36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.097079] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1876.097303] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81792282-a66a-437b-b823-3e491ac07d13 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.103321] env[63241]: DEBUG oslo_vmware.api [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1876.103321] env[63241]: value = "task-1821131" [ 1876.103321] env[63241]: _type = "Task" [ 1876.103321] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.111482] env[63241]: DEBUG oslo_vmware.api [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821131, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.187425] env[63241]: INFO nova.compute.manager [-] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Took 1.43 seconds to deallocate network for instance. [ 1876.232508] env[63241]: INFO nova.compute.manager [-] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Took 1.47 seconds to deallocate network for instance. [ 1876.499373] env[63241]: INFO nova.network.neutron [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Port 55bc89c7-241d-48af-9915-9dd2f1afd2c0 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1876.499743] env[63241]: DEBUG nova.network.neutron [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updating instance_info_cache with network_info: [{"id": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "address": "fa:16:3e:9f:6a:40", "network": {"id": "cafd3d43-975a-4836-8948-2f1b47e56666", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1286345479-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ecafb0abbdc74501b22b20b797c4c60c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb56e1be-f5", "ovs_interfaceid": "db56e1be-f5b4-4531-8573-93fe90bc8b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.614061] env[63241]: DEBUG oslo_vmware.api [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821131, 'name': PowerOffVM_Task, 'duration_secs': 0.221704} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.615072] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1876.617084] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1876.617276] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1876.617857] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39c672a2-772d-4df2-95bb-35f84a69901e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.694026] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.694294] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.694515] env[63241]: DEBUG nova.objects.instance [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'resources' on Instance uuid 6f3cbd23-30b9-4502-be07-2edd0a701291 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1876.706643] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1876.706853] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1876.707049] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleting the datastore file [datastore1] d7d5b5a1-bfe9-43a1-b8f1-0a0048562530 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1876.707308] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-f7f7fcd9-6afd-4359-b161-fa98602b8d99 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.716339] env[63241]: DEBUG oslo_vmware.api [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1876.716339] env[63241]: value = "task-1821133" [ 1876.716339] env[63241]: _type = "Task" [ 1876.716339] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.724375] env[63241]: DEBUG oslo_vmware.api [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821133, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.742149] env[63241]: DEBUG oslo_concurrency.lockutils [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.743504] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Updating instance_info_cache with network_info: [{"id": "58c509e6-4c6b-4a29-9906-ff3258989040", "address": "fa:16:3e:5e:d1:d0", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58c509e6-4c", "ovs_interfaceid": "58c509e6-4c6b-4a29-9906-ff3258989040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.002200] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Releasing lock "refresh_cache-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.023624] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 
tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1877.047894] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1877.048156] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1877.048318] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1877.048504] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1877.048651] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1877.048838] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1877.049053] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1877.049233] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Build topologies 
for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1877.049431] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1877.049601] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1877.049773] env[63241]: DEBUG nova.virt.hardware [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1877.050620] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c687a52-31e9-4954-9df6-54627990d821 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.059150] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8203b53-0ea7-49a1-9c50-6234ce0693d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.176994] env[63241]: DEBUG nova.compute.manager [req-2c39d983-bbff-44d5-9e43-8578befd2a24 req-92d8e8a0-6710-43e9-96d8-f2c5d5787994 service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Received event network-vif-plugged-5c215984-3f1e-41db-966c-17d3c097a862 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.177233] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c39d983-bbff-44d5-9e43-8578befd2a24 req-92d8e8a0-6710-43e9-96d8-f2c5d5787994 service nova] Acquiring lock "bf339484-4b96-4326-b035-39783aff4461-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.177516] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c39d983-bbff-44d5-9e43-8578befd2a24 req-92d8e8a0-6710-43e9-96d8-f2c5d5787994 service nova] Lock "bf339484-4b96-4326-b035-39783aff4461-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.177709] env[63241]: DEBUG oslo_concurrency.lockutils [req-2c39d983-bbff-44d5-9e43-8578befd2a24 req-92d8e8a0-6710-43e9-96d8-f2c5d5787994 service nova] Lock "bf339484-4b96-4326-b035-39783aff4461-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.177879] env[63241]: DEBUG nova.compute.manager [req-2c39d983-bbff-44d5-9e43-8578befd2a24 req-92d8e8a0-6710-43e9-96d8-f2c5d5787994 service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] No waiting events found dispatching network-vif-plugged-5c215984-3f1e-41db-966c-17d3c097a862 
{{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1877.178057] env[63241]: WARNING nova.compute.manager [req-2c39d983-bbff-44d5-9e43-8578befd2a24 req-92d8e8a0-6710-43e9-96d8-f2c5d5787994 service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Received unexpected event network-vif-plugged-5c215984-3f1e-41db-966c-17d3c097a862 for instance with vm_state building and task_state spawning. [ 1877.228619] env[63241]: DEBUG oslo_vmware.api [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821133, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160078} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.228862] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1877.229080] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1877.229269] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1877.229441] env[63241]: INFO nova.compute.manager [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1877.229679] env[63241]: DEBUG oslo.service.loopingcall [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1877.229873] env[63241]: DEBUG nova.compute.manager [-] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1877.229969] env[63241]: DEBUG nova.network.neutron [-] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1877.240605] env[63241]: DEBUG nova.compute.manager [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Received event network-vif-plugged-58c509e6-4c6b-4a29-9906-ff3258989040 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.240828] env[63241]: DEBUG oslo_concurrency.lockutils [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] Acquiring lock "98e7f776-b36e-4132-803e-f2272e26c44e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.241077] env[63241]: DEBUG oslo_concurrency.lockutils [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] Lock "98e7f776-b36e-4132-803e-f2272e26c44e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.241253] env[63241]: DEBUG oslo_concurrency.lockutils [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] Lock "98e7f776-b36e-4132-803e-f2272e26c44e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.241419] env[63241]: DEBUG nova.compute.manager [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] No waiting events found dispatching network-vif-plugged-58c509e6-4c6b-4a29-9906-ff3258989040 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1877.241614] env[63241]: WARNING nova.compute.manager [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Received unexpected event network-vif-plugged-58c509e6-4c6b-4a29-9906-ff3258989040 for instance with vm_state building and task_state spawning. [ 1877.241733] env[63241]: DEBUG nova.compute.manager [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Received event network-changed-58c509e6-4c6b-4a29-9906-ff3258989040 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.241882] env[63241]: DEBUG nova.compute.manager [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Refreshing instance network info cache due to event network-changed-58c509e6-4c6b-4a29-9906-ff3258989040. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1877.242062] env[63241]: DEBUG oslo_concurrency.lockutils [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] Acquiring lock "refresh_cache-98e7f776-b36e-4132-803e-f2272e26c44e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.246533] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "refresh_cache-98e7f776-b36e-4132-803e-f2272e26c44e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.246643] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Instance network_info: |[{"id": "58c509e6-4c6b-4a29-9906-ff3258989040", "address": "fa:16:3e:5e:d1:d0", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58c509e6-4c", "ovs_interfaceid": "58c509e6-4c6b-4a29-9906-ff3258989040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1877.246880] env[63241]: DEBUG oslo_concurrency.lockutils [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] Acquired lock "refresh_cache-98e7f776-b36e-4132-803e-f2272e26c44e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.247064] env[63241]: DEBUG nova.network.neutron [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Refreshing network info cache for port 58c509e6-4c6b-4a29-9906-ff3258989040 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1877.248185] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:d1:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e885ebd4-93ca-4e9e-8889-0f16bd91e61e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'58c509e6-4c6b-4a29-9906-ff3258989040', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1877.255804] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Creating folder: Project (4642d232d037477ba8813b56e579d84f). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1877.260930] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-438afe6d-0fde-49a7-8654-76d6f1021074 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.272885] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Created folder: Project (4642d232d037477ba8813b56e579d84f) in parent group-v376927. [ 1877.273058] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Creating folder: Instances. Parent ref: group-v377207. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1877.275376] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e628140a-2712-4c2f-b492-5651f3ecb8c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.286091] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Created folder: Instances in parent group-v377207. [ 1877.286339] env[63241]: DEBUG oslo.service.loopingcall [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1877.288755] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1877.289751] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-044f0408-8ae6-4bea-9c74-6d81464de9d8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.310792] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1877.310792] env[63241]: value = "task-1821136" [ 1877.310792] env[63241]: _type = "Task" [ 1877.310792] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.324139] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821136, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.404479] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a09b28-3922-4c02-ba02-9807892f4328 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.411979] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc1c21a-6312-46ab-8cb9-b613a245d415 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.445802] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed02b58b-3cd3-4ef2-acfa-39319b01da7e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.456158] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000fe24c-4a07-437f-8b7f-af0fefe01b9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.470518] env[63241]: DEBUG nova.compute.provider_tree [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1877.506594] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b8619bea-fdc5-4576-9ae2-b3ead5407b9e tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "interface-d7d5b5a1-bfe9-43a1-b8f1-0a0048562530-55bc89c7-241d-48af-9915-9dd2f1afd2c0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 14.208s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.653560] env[63241]: DEBUG nova.network.neutron [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Updated VIF entry in instance network info cache for port 58c509e6-4c6b-4a29-9906-ff3258989040. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1877.653981] env[63241]: DEBUG nova.network.neutron [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Updating instance_info_cache with network_info: [{"id": "58c509e6-4c6b-4a29-9906-ff3258989040", "address": "fa:16:3e:5e:d1:d0", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58c509e6-4c", "ovs_interfaceid": "58c509e6-4c6b-4a29-9906-ff3258989040", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.736971] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Successfully updated port: 5c215984-3f1e-41db-966c-17d3c097a862 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1877.770340] env[63241]: DEBUG nova.compute.manager [req-eea7af45-bf17-4c67-9dfa-21654004147f req-8d76c723-9e7d-432f-87de-1bd12508063a service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Received event network-changed-5c215984-3f1e-41db-966c-17d3c097a862 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.770623] env[63241]: DEBUG nova.compute.manager [req-eea7af45-bf17-4c67-9dfa-21654004147f req-8d76c723-9e7d-432f-87de-1bd12508063a service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Refreshing instance network info cache due to event network-changed-5c215984-3f1e-41db-966c-17d3c097a862. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1877.770957] env[63241]: DEBUG oslo_concurrency.lockutils [req-eea7af45-bf17-4c67-9dfa-21654004147f req-8d76c723-9e7d-432f-87de-1bd12508063a service nova] Acquiring lock "refresh_cache-bf339484-4b96-4326-b035-39783aff4461" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.771192] env[63241]: DEBUG oslo_concurrency.lockutils [req-eea7af45-bf17-4c67-9dfa-21654004147f req-8d76c723-9e7d-432f-87de-1bd12508063a service nova] Acquired lock "refresh_cache-bf339484-4b96-4326-b035-39783aff4461" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.771422] env[63241]: DEBUG nova.network.neutron [req-eea7af45-bf17-4c67-9dfa-21654004147f req-8d76c723-9e7d-432f-87de-1bd12508063a service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Refreshing network info cache for port 5c215984-3f1e-41db-966c-17d3c097a862 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1877.823079] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821136, 'name': CreateVM_Task, 'duration_secs': 0.400848} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.823079] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1877.823079] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.823079] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.823079] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1877.823079] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faa53e7c-f9a8-434e-98c8-a87dc7a80d72 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.827364] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1877.827364] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52dee47a-33b3-e490-7ce6-39c49ad67647" [ 1877.827364] env[63241]: _type = "Task" [ 1877.827364] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.834718] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dee47a-33b3-e490-7ce6-39c49ad67647, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.974072] env[63241]: DEBUG nova.scheduler.client.report [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1878.156965] env[63241]: DEBUG oslo_concurrency.lockutils [req-2b7a89f8-dfb2-4f08-aeda-d00b6fb0b74f req-58ba2828-2c6d-434d-a8a7-910b917d788d service nova] Releasing lock "refresh_cache-98e7f776-b36e-4132-803e-f2272e26c44e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.243320] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "refresh_cache-bf339484-4b96-4326-b035-39783aff4461" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.304288] env[63241]: DEBUG nova.network.neutron [req-eea7af45-bf17-4c67-9dfa-21654004147f req-8d76c723-9e7d-432f-87de-1bd12508063a service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1878.343021] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dee47a-33b3-e490-7ce6-39c49ad67647, 'name': SearchDatastore_Task, 'duration_secs': 0.018969} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.343383] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.343637] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1878.343885] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.344057] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.344253] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1878.344543] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15215f51-8cfb-48e4-8c8b-1c91a854c4e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.353522] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1878.353690] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1878.354418] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2aa9adcf-3399-4f40-981e-2d4ab73c4965 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.360564] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1878.360564] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bfc1b0-3411-1d6d-efae-949c33c8f7fc" [ 1878.360564] env[63241]: _type = "Task" [ 1878.360564] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.368400] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bfc1b0-3411-1d6d-efae-949c33c8f7fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.394743] env[63241]: DEBUG nova.network.neutron [req-eea7af45-bf17-4c67-9dfa-21654004147f req-8d76c723-9e7d-432f-87de-1bd12508063a service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.478190] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.784s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.480427] env[63241]: DEBUG oslo_concurrency.lockutils [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.738s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.481173] env[63241]: DEBUG nova.objects.instance [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lazy-loading 'resources' on Instance uuid 1e172f73-972e-4401-b358-512f7e03b27f {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1878.503888] env[63241]: INFO nova.scheduler.client.report [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleted allocations for instance 6f3cbd23-30b9-4502-be07-2edd0a701291 [ 1878.600460] env[63241]: DEBUG nova.network.neutron [-] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.871837] env[63241]: DEBUG oslo_vmware.api [None 
req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52bfc1b0-3411-1d6d-efae-949c33c8f7fc, 'name': SearchDatastore_Task, 'duration_secs': 0.008369} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.872300] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93a85095-e7d9-4cdc-b332-62773bfe204b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.877935] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1878.877935] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522ee3e5-f04c-8942-0b91-5521aaec1259" [ 1878.877935] env[63241]: _type = "Task" [ 1878.877935] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.884586] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522ee3e5-f04c-8942-0b91-5521aaec1259, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.897178] env[63241]: DEBUG oslo_concurrency.lockutils [req-eea7af45-bf17-4c67-9dfa-21654004147f req-8d76c723-9e7d-432f-87de-1bd12508063a service nova] Releasing lock "refresh_cache-bf339484-4b96-4326-b035-39783aff4461" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.897510] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "refresh_cache-bf339484-4b96-4326-b035-39783aff4461" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.897668] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1879.014794] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18dd5d0d-2508-4802-8bdf-1f7f4b5e268a tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "6f3cbd23-30b9-4502-be07-2edd0a701291" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.769s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.103879] env[63241]: INFO nova.compute.manager [-] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Took 1.87 seconds to deallocate network for instance. 
[ 1879.140378] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3307a3-54cd-4d8f-b969-3a2bff49db38 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.148210] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c7b9ee-bfa1-407f-b837-c8f8e4016d1d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.179801] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d8675d-faac-43a4-9088-d460cafe6e83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.187268] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3df20b2-97d1-4ac1-9491-7218396a11d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.200695] env[63241]: DEBUG nova.compute.provider_tree [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1879.270449] env[63241]: DEBUG nova.compute.manager [req-3e88675c-89cc-4027-9e5e-2094b1877e45 req-c396cb48-7826-4ed6-9936-f34970f3d089 service nova] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Received event network-vif-deleted-db56e1be-f5b4-4531-8573-93fe90bc8b34 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1879.388397] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522ee3e5-f04c-8942-0b91-5521aaec1259, 'name': SearchDatastore_Task, 'duration_secs': 0.009296} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.388729] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.388997] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 98e7f776-b36e-4132-803e-f2272e26c44e/98e7f776-b36e-4132-803e-f2272e26c44e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1879.389311] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c81c8e6-c260-4cba-82b1-7c1bc98efd54 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.396236] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1879.396236] env[63241]: value = "task-1821137" [ 1879.396236] env[63241]: _type = "Task" [ 1879.396236] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.405703] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821137, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.440192] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1879.524421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.524421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.612504] env[63241]: DEBUG oslo_concurrency.lockutils [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.650440] env[63241]: DEBUG nova.network.neutron [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Updating instance_info_cache with network_info: [{"id": "5c215984-3f1e-41db-966c-17d3c097a862", "address": "fa:16:3e:49:d4:68", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c215984-3f", "ovs_interfaceid": "5c215984-3f1e-41db-966c-17d3c097a862", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.704194] env[63241]: DEBUG nova.scheduler.client.report [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1879.789471] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.789754] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.908103] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821137, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.026016] env[63241]: INFO nova.compute.manager [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Detaching volume 9d900cbe-b561-4b8b-b228-5471bffb1998 [ 1880.056828] env[63241]: INFO nova.virt.block_device [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Attempting to driver detach volume 9d900cbe-b561-4b8b-b228-5471bffb1998 from mountpoint /dev/sdb [ 1880.056828] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1880.056828] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377187', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'name': 'volume-9d900cbe-b561-4b8b-b228-5471bffb1998', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '01af6dc5-e0e7-4f8b-ad07-73af80c32577', 'attached_at': '', 'detached_at': '', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'serial': '9d900cbe-b561-4b8b-b228-5471bffb1998'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1880.057480] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f728087b-6615-4dfe-b28f-349bc4fc03e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.082638] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa35694-3143-405d-836c-827d755cfa52 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.088457] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe4d5bf-db52-45a0-9827-aab05987cd07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.108809] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376dbd3f-4bed-4cc0-a1d2-cac9f51ecd3b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.123473] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] The volume has not been displaced from its original location: [datastore1] volume-9d900cbe-b561-4b8b-b228-5471bffb1998/volume-9d900cbe-b561-4b8b-b228-5471bffb1998.vmdk. No consolidation needed. 
{{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1880.128798] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Reconfiguring VM instance instance-00000056 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1880.129118] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab141476-b1e8-4226-a997-b5d3e52dc31c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.146196] env[63241]: DEBUG oslo_vmware.api [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1880.146196] env[63241]: value = "task-1821138" [ 1880.146196] env[63241]: _type = "Task" [ 1880.146196] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.153772] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "refresh_cache-bf339484-4b96-4326-b035-39783aff4461" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.154071] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Instance network_info: |[{"id": "5c215984-3f1e-41db-966c-17d3c097a862", "address": "fa:16:3e:49:d4:68", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c215984-3f", "ovs_interfaceid": "5c215984-3f1e-41db-966c-17d3c097a862", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1880.154323] env[63241]: DEBUG oslo_vmware.api [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821138, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.154652] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:d4:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e885ebd4-93ca-4e9e-8889-0f16bd91e61e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c215984-3f1e-41db-966c-17d3c097a862', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1880.161936] env[63241]: DEBUG oslo.service.loopingcall [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1880.162139] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf339484-4b96-4326-b035-39783aff4461] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1880.162339] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d7f3c30-bd0e-4d61-ba71-7629870fcc5c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.180858] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1880.180858] env[63241]: value = "task-1821139" [ 1880.180858] env[63241]: _type = "Task" [ 1880.180858] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.188051] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821139, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.209207] env[63241]: DEBUG oslo_concurrency.lockutils [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.211514] env[63241]: DEBUG oslo_concurrency.lockutils [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.599s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.211726] env[63241]: DEBUG nova.objects.instance [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'resources' on Instance uuid d7d5b5a1-bfe9-43a1-b8f1-0a0048562530 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1880.232418] env[63241]: INFO nova.scheduler.client.report [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Deleted allocations for instance 1e172f73-972e-4401-b358-512f7e03b27f [ 1880.293832] env[63241]: DEBUG nova.compute.utils [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1880.408815] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530878} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.409099] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 98e7f776-b36e-4132-803e-f2272e26c44e/98e7f776-b36e-4132-803e-f2272e26c44e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1880.409322] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1880.409573] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4321c89-e52b-466f-855a-cedbc629195c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.416230] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1880.416230] env[63241]: value = "task-1821140" [ 1880.416230] env[63241]: _type = "Task" [ 1880.416230] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.425978] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821140, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.473386] env[63241]: DEBUG oslo_concurrency.lockutils [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.473745] env[63241]: DEBUG oslo_concurrency.lockutils [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.655974] env[63241]: DEBUG oslo_vmware.api [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821138, 'name': ReconfigVM_Task, 'duration_secs': 0.242577} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.656169] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Reconfigured VM instance instance-00000056 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1880.661052] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d45937aa-3da9-44b5-affb-4bb990bf246b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.675627] env[63241]: DEBUG oslo_vmware.api [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1880.675627] env[63241]: value = "task-1821141" [ 1880.675627] env[63241]: _type = "Task" [ 1880.675627] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.683357] env[63241]: DEBUG oslo_vmware.api [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821141, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.690619] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821139, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.740030] env[63241]: DEBUG oslo_concurrency.lockutils [None req-96934d7a-09c1-4b93-b683-087cb3d1d38a tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "1e172f73-972e-4401-b358-512f7e03b27f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.381s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.796783] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.841134] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1880.841403] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1880.871319] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-29bea94d-9db2-4f12-9a92-2a5df715070d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.879475] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6148cd-f00e-4f45-b6dd-76e747b9ebb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.910919] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797a5a62-ca2c-4e86-9925-c802704bffb5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.922659] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc8cc42-f309-4496-88b4-cd9435f6d135 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.931016] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821140, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065229} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.939122] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1880.939697] env[63241]: DEBUG nova.compute.provider_tree [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1880.941823] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e1aed5-2356-45b3-ad39-036e24071090 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.966659] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 98e7f776-b36e-4132-803e-f2272e26c44e/98e7f776-b36e-4132-803e-f2272e26c44e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1880.967269] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cdc2911-90e9-413c-8e53-4304a0d461de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.982639] env[63241]: INFO nova.compute.manager [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Detaching volume a768a43d-46e3-4b6e-b741-89f8691d110a [ 1880.991099] env[63241]: DEBUG 
oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1880.991099] env[63241]: value = "task-1821142" [ 1880.991099] env[63241]: _type = "Task" [ 1880.991099] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.999760] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821142, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.020170] env[63241]: INFO nova.virt.block_device [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Attempting to driver detach volume a768a43d-46e3-4b6e-b741-89f8691d110a from mountpoint /dev/sdb [ 1881.020508] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Volume detach. Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1881.020772] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377193', 'volume_id': 'a768a43d-46e3-4b6e-b741-89f8691d110a', 'name': 'volume-a768a43d-46e3-4b6e-b741-89f8691d110a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e62f49f0-370d-4b5d-ab43-72e0e6238432', 'attached_at': '', 'detached_at': '', 'volume_id': 'a768a43d-46e3-4b6e-b741-89f8691d110a', 'serial': 'a768a43d-46e3-4b6e-b741-89f8691d110a'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1881.021824] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085f74d0-d1a6-428e-8c42-94a6aa6c3e25 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.046393] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9ea8f3-5e8b-43fe-a7ef-1275ff091849 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.054130] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c663fcb-fc90-487d-95c9-b11071789b80 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.075723] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8bf8ea-6b1f-4695-95fe-060354fd9705 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.092903] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] The volume has not been displaced from its original location: [datastore1] volume-a768a43d-46e3-4b6e-b741-89f8691d110a/volume-a768a43d-46e3-4b6e-b741-89f8691d110a.vmdk. No consolidation needed. {{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1881.098160] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Reconfiguring VM instance instance-0000005b to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1881.098487] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-995451e6-51ec-4e97-b3fb-227bef9b64c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.116254] env[63241]: DEBUG oslo_vmware.api [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1881.116254] env[63241]: value = "task-1821143" [ 1881.116254] env[63241]: _type = "Task" [ 1881.116254] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.124631] env[63241]: DEBUG oslo_vmware.api [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821143, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.187944] env[63241]: DEBUG oslo_vmware.api [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821141, 'name': ReconfigVM_Task, 'duration_secs': 0.148943} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.192731] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377187', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'name': 'volume-9d900cbe-b561-4b8b-b228-5471bffb1998', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '01af6dc5-e0e7-4f8b-ad07-73af80c32577', 'attached_at': '', 'detached_at': '', 'volume_id': '9d900cbe-b561-4b8b-b228-5471bffb1998', 'serial': '9d900cbe-b561-4b8b-b228-5471bffb1998'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1881.202088] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821139, 'name': CreateVM_Task, 'duration_secs': 0.790486} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.202305] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf339484-4b96-4326-b035-39783aff4461] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1881.203162] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.203485] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.203794] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1881.204127] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d59d9a6f-f35b-4c6b-8a6f-fb7b386e62b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.210546] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1881.210546] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e0c857-b206-7b1e-1bb2-e0f25333520c" [ 1881.210546] env[63241]: _type = "Task" [ 1881.210546] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.221322] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e0c857-b206-7b1e-1bb2-e0f25333520c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.347937] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1881.348149] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1881.445608] env[63241]: DEBUG nova.scheduler.client.report [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1881.501790] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821142, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.626395] env[63241]: DEBUG oslo_vmware.api [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821143, 'name': ReconfigVM_Task, 'duration_secs': 0.243067} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.626685] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Reconfigured VM instance instance-0000005b to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1881.631829] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3f9a892-2c99-42d6-9b21-4fa4562589d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.646556] env[63241]: DEBUG oslo_vmware.api [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1881.646556] env[63241]: value = "task-1821145" [ 1881.646556] env[63241]: _type = "Task" [ 1881.646556] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.654648] env[63241]: DEBUG oslo_vmware.api [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821145, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.721513] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e0c857-b206-7b1e-1bb2-e0f25333520c, 'name': SearchDatastore_Task, 'duration_secs': 0.009881} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.721668] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.721907] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1881.722164] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.722310] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.722493] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1881.722753] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44012fe4-dc5a-4fef-a487-2f227ed81aea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.731539] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1881.731720] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1881.732449] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3dbb406-1b5c-4b0c-9931-ea8219ab16d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.737613] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1881.737613] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b105f0-e49e-7581-c12f-542a9187881a" [ 1881.737613] env[63241]: _type = "Task" [ 1881.737613] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.741963] env[63241]: DEBUG nova.objects.instance [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'flavor' on Instance uuid 01af6dc5-e0e7-4f8b-ad07-73af80c32577 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1881.748589] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b105f0-e49e-7581-c12f-542a9187881a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.869877] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.870100] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.870395] env[63241]: INFO nova.compute.manager [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Attaching volume ddd89002-2b7d-4b7a-a368-0b2fe43b975a to /dev/sdb [ 1881.877812] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.877944] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.878098] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1881.900314] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca83b29c-72ca-4872-9bf1-12a191d1501c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.908708] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83c5d53-6d7f-4957-b57c-73d795d9cb59 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.921770] env[63241]: DEBUG nova.virt.block_device [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Updating existing volume attachment record: 30b92a5c-9a6c-4894-ad24-5bca890bf1be {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1881.951058] env[63241]: DEBUG oslo_concurrency.lockutils [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.739s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.972505] env[63241]: INFO nova.scheduler.client.report [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleted allocations for instance d7d5b5a1-bfe9-43a1-b8f1-0a0048562530 [ 1882.001583] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821142, 'name': ReconfigVM_Task, 'duration_secs': 0.628303} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.001850] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 98e7f776-b36e-4132-803e-f2272e26c44e/98e7f776-b36e-4132-803e-f2272e26c44e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1882.002528] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-442a886a-2c81-486a-a797-5662815156df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.008241] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1882.008241] env[63241]: value = "task-1821146" [ 1882.008241] env[63241]: _type = "Task" [ 1882.008241] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.015645] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821146, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.157286] env[63241]: DEBUG oslo_vmware.api [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821145, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.249208] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b105f0-e49e-7581-c12f-542a9187881a, 'name': SearchDatastore_Task, 'duration_secs': 0.017459} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.251391] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba75a0b6-3582-4eaa-8b38-b0a7a9df2f84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.257363] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1882.257363] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e1368f-10c7-c763-3286-0ab038fecfb8" [ 1882.257363] env[63241]: _type = "Task" [ 1882.257363] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.265968] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e1368f-10c7-c763-3286-0ab038fecfb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.479467] env[63241]: DEBUG oslo_concurrency.lockutils [None req-efea0de8-a083-496a-9758-15e167451c84 tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "d7d5b5a1-bfe9-43a1-b8f1-0a0048562530" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.396s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.521145] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821146, 'name': Rename_Task, 'duration_secs': 0.153409} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.521455] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1882.522017] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c42a153a-7016-4b8a-bfcc-aa9694cf1062 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.527571] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1882.527571] env[63241]: value = "task-1821150" [ 1882.527571] env[63241]: _type = "Task" [ 1882.527571] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.535526] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.658582] env[63241]: DEBUG oslo_vmware.api [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821145, 'name': ReconfigVM_Task, 'duration_secs': 0.790894} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.658830] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377193', 'volume_id': 'a768a43d-46e3-4b6e-b741-89f8691d110a', 'name': 'volume-a768a43d-46e3-4b6e-b741-89f8691d110a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e62f49f0-370d-4b5d-ab43-72e0e6238432', 'attached_at': '', 'detached_at': '', 'volume_id': 'a768a43d-46e3-4b6e-b741-89f8691d110a', 'serial': 'a768a43d-46e3-4b6e-b741-89f8691d110a'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1882.711729] env[63241]: DEBUG oslo_concurrency.lockutils [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.712083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.712432] env[63241]: DEBUG oslo_concurrency.lockutils [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.712700] env[63241]: DEBUG oslo_concurrency.lockutils [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.712991] env[63241]: DEBUG oslo_concurrency.lockutils [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.715028] env[63241]: INFO nova.compute.manager [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Terminating instance [ 1882.716822] env[63241]: DEBUG nova.compute.manager [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1882.717015] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1882.717859] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b90a3a-5cce-48f9-83cd-15be34889267 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.725942] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1882.726198] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed76b7d5-3b4a-4b52-b794-87cb0d4bc1b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.733752] env[63241]: DEBUG oslo_vmware.api [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1882.733752] env[63241]: value = "task-1821151" [ 1882.733752] env[63241]: _type = "Task" [ 1882.733752] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.741432] env[63241]: DEBUG oslo_vmware.api [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821151, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.752121] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d2997f36-829d-45be-a0ce-6a13ab5a55b0 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.229s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.767045] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e1368f-10c7-c763-3286-0ab038fecfb8, 'name': SearchDatastore_Task, 'duration_secs': 0.009832} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.767872] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.768142] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] bf339484-4b96-4326-b035-39783aff4461/bf339484-4b96-4326-b035-39783aff4461.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1882.768405] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-921e7b8b-c3e6-4dbf-b2c4-979cf6950e44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.775513] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1882.775513] env[63241]: value = "task-1821152" [ 1882.775513] env[63241]: _type = "Task" [ 1882.775513] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.784207] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821152, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.041135] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821150, 'name': PowerOnVM_Task, 'duration_secs': 0.475916} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.041505] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1883.042205] env[63241]: INFO nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1883.042205] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1883.042719] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d4baee-c72e-4748-92ea-7155dc49b8cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.174763] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updating instance_info_cache with network_info: [{"id": "6bc11935-f0d5-456c-b815-ea415689a621", "address": "fa:16:3e:de:e7:97", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc11935-f0", "ovs_interfaceid": "6bc11935-f0d5-456c-b815-ea415689a621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.210523] env[63241]: DEBUG nova.objects.instance [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'flavor' on Instance uuid e62f49f0-370d-4b5d-ab43-72e0e6238432 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.246067] env[63241]: DEBUG oslo_vmware.api [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821151, 'name': 
PowerOffVM_Task, 'duration_secs': 0.174403} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.246375] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1883.246552] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1883.246815] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6cb1d0c5-b42b-41c9-9d43-4b829f3b247f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.285823] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821152, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.446585] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1883.446585] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1883.446585] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleting the datastore file [datastore1] 6b96988b-cc79-41d7-a17d-277ae5aeb4dc {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1883.446585] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c112a914-8c11-4f71-8acd-ccedd12f3087 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.454179] env[63241]: DEBUG oslo_vmware.api [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for the task: (returnval){ [ 1883.454179] env[63241]: value = "task-1821154" [ 1883.454179] env[63241]: _type = "Task" [ 1883.454179] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.462605] env[63241]: DEBUG oslo_vmware.api [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821154, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.561880] env[63241]: INFO nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Took 13.09 seconds to build instance. [ 1883.677706] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.677871] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1883.678065] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.678312] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.678481] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.678631] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.678772] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.678921] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.679061] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1883.679207] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.786604] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515737} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.786872] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] bf339484-4b96-4326-b035-39783aff4461/bf339484-4b96-4326-b035-39783aff4461.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1883.787097] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1883.787655] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f59684e-e4a0-4a33-8e72-6a5d45bcecf3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.795096] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1883.795096] env[63241]: value = "task-1821155" [ 1883.795096] env[63241]: _type = "Task" [ 1883.795096] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.803213] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821155, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.811738] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.811974] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.812202] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.812387] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.812560] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.814651] env[63241]: INFO nova.compute.manager [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Terminating instance [ 1883.816356] env[63241]: DEBUG nova.compute.manager [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1883.816550] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1883.817349] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ae1353-bc3a-4166-9af3-269d413fa7e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.825242] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1883.825473] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-652dda15-b110-48f7-a03b-0eda7f05247d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.832666] env[63241]: DEBUG oslo_vmware.api [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1883.832666] env[63241]: value = "task-1821156" [ 1883.832666] env[63241]: _type = "Task" [ 1883.832666] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.840283] env[63241]: DEBUG oslo_vmware.api [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821156, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.963013] env[63241]: DEBUG oslo_vmware.api [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Task: {'id': task-1821154, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171835} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.963308] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1883.963529] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1883.963742] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1883.963926] env[63241]: INFO nova.compute.manager [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1883.964231] env[63241]: DEBUG oslo.service.loopingcall [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1883.964400] env[63241]: DEBUG nova.compute.manager [-] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1883.964495] env[63241]: DEBUG nova.network.neutron [-] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1884.064466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "98e7f776-b36e-4132-803e-f2272e26c44e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.600s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.181626] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.182042] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.182042] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.182231] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1884.183313] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbba6281-0998-472e-8386-bd5aec490139 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.190833] env[63241]: DEBUG nova.compute.manager [req-cdc1bb83-e7e9-4a28-acc4-bdaf0c950b19 req-5dfabdb2-5e90-4ba6-a068-9a304475d7ea service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Received event network-vif-deleted-6f97669d-a2c6-4625-a1b6-374f5565ebb0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1884.190833] env[63241]: INFO nova.compute.manager [req-cdc1bb83-e7e9-4a28-acc4-bdaf0c950b19 req-5dfabdb2-5e90-4ba6-a068-9a304475d7ea service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Neutron deleted interface 6f97669d-a2c6-4625-a1b6-374f5565ebb0; detaching it from the instance and deleting it from the info cache [ 1884.190922] env[63241]: DEBUG nova.network.neutron [req-cdc1bb83-e7e9-4a28-acc4-bdaf0c950b19 req-5dfabdb2-5e90-4ba6-a068-9a304475d7ea service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] 
Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.195606] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b54314-ccbe-44a2-8489-f583d5d93875 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.210583] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec01c2e9-c4f7-4b27-9dc7-534400f04487 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.217048] env[63241]: DEBUG oslo_concurrency.lockutils [None req-30e88df6-d7fe-4e4e-90df-662506a60c4f tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.743s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.218920] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bca095-6287-432a-a747-b78ddfe96fa7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.253488] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179336MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1884.253657] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.253855] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.308075] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821155, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069377} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.308075] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1884.308075] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cebaff-2141-4743-b8d3-74f7972c7440 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.337232] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] bf339484-4b96-4326-b035-39783aff4461/bf339484-4b96-4326-b035-39783aff4461.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1884.337232] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25a38356-03b3-484b-8333-87988b8fe2ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.361374] env[63241]: DEBUG oslo_vmware.api [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821156, 'name': PowerOffVM_Task, 'duration_secs': 0.512914} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.361374] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1884.361606] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1884.361760] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1884.361760] env[63241]: value = "task-1821157" [ 1884.361760] env[63241]: _type = "Task" [ 1884.361760] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.361965] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3f05c17-9e8f-46ba-b769-b65559db1bb2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.371126] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821157, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.512044] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1884.512367] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1884.512637] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleting the datastore file [datastore1] 01af6dc5-e0e7-4f8b-ad07-73af80c32577 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1884.512939] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-901f9180-3681-4eb7-b1cb-fc1649890a8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.520648] env[63241]: DEBUG oslo_vmware.api [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1884.520648] env[63241]: value = "task-1821160" [ 1884.520648] env[63241]: _type = "Task" [ 1884.520648] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.529249] env[63241]: DEBUG oslo_vmware.api [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821160, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.663073] env[63241]: DEBUG nova.network.neutron [-] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.695857] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-978049bc-50d3-4dc4-a1e0-c5565245b17e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.704947] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff59ea97-3b51-41fe-b1a3-cd35bcaa81c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.736530] env[63241]: DEBUG nova.compute.manager [req-cdc1bb83-e7e9-4a28-acc4-bdaf0c950b19 req-5dfabdb2-5e90-4ba6-a068-9a304475d7ea service nova] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Detach interface failed, port_id=6f97669d-a2c6-4625-a1b6-374f5565ebb0, reason: Instance 6b96988b-cc79-41d7-a17d-277ae5aeb4dc could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1884.758747] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.759025] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.759287] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "e62f49f0-370d-4b5d-ab43-72e0e6238432-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.759474] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.759644] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.766269] env[63241]: INFO nova.compute.manager [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Terminating instance [ 1884.768141] env[63241]: DEBUG nova.compute.manager [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1884.768335] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1884.769324] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099b4d87-23c9-41cc-b2a1-d58a527c2499 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.777262] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1884.778029] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8281bc92-945d-42eb-9f8a-ada5a7019afb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.784299] env[63241]: DEBUG oslo_vmware.api [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1884.784299] env[63241]: value = "task-1821161" [ 1884.784299] env[63241]: _type = "Task" [ 1884.784299] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.792733] env[63241]: DEBUG oslo_vmware.api [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.873430] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821157, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.030626] env[63241]: DEBUG oslo_vmware.api [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821160, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311067} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.030806] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1885.030855] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1885.031011] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1885.031252] env[63241]: INFO nova.compute.manager [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1885.031519] env[63241]: DEBUG oslo.service.loopingcall [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.031686] env[63241]: DEBUG nova.compute.manager [-] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1885.031779] env[63241]: DEBUG nova.network.neutron [-] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1885.131685] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.131972] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.132249] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.132450] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.132659] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.134802] env[63241]: INFO nova.compute.manager [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Terminating instance [ 1885.136592] env[63241]: DEBUG nova.compute.manager [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1885.136782] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1885.137620] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9138eb8b-6fe1-4cd7-b88d-7fdc5e3be41a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.148382] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1885.148382] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c899ddc3-b1ca-4d42-9f06-dee20a924795 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.152722] env[63241]: DEBUG oslo_vmware.api [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1885.152722] env[63241]: value = "task-1821162" [ 1885.152722] env[63241]: _type = "Task" [ 1885.152722] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.161607] env[63241]: DEBUG oslo_vmware.api [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821162, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.165322] env[63241]: INFO nova.compute.manager [-] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Took 1.20 seconds to deallocate network for instance. [ 1885.293468] env[63241]: DEBUG oslo_vmware.api [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821161, 'name': PowerOffVM_Task, 'duration_secs': 0.211963} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.294599] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1885.294599] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1885.294599] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00bf2fc8-67c6-4430-902b-613dd7101362 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.296349] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 31e84206-e583-4610-969e-2ccae2d0b206 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.296481] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance fb5d60fa-fa13-44a1-8291-4645761a0c80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.296601] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 14af9f82-525e-453c-8dc5-ef5b13c67ee4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.296762] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e62f49f0-370d-4b5d-ab43-72e0e6238432 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.296902] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e28ba013-0bc5-4edc-858d-674980bc8742 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.297030] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 6b96988b-cc79-41d7-a17d-277ae5aeb4dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.297152] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 01af6dc5-e0e7-4f8b-ad07-73af80c32577 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.297267] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 779d2380-be6c-4fdb-8755-10e99f8a6fd9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.297378] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance c0ea8cf6-4023-4093-b0bc-67b02604125a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.297488] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 98e7f776-b36e-4132-803e-f2272e26c44e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.299834] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance bf339484-4b96-4326-b035-39783aff4461 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1885.299834] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1885.299834] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1885.373332] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821157, 'name': ReconfigVM_Task, 'duration_secs': 0.518781} completed successfully. 
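
The "Final resource view" record above is consistent with the eleven per-instance allocations listed just before it (each {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}), assuming the tracker also counts the 512 MB of reserved host memory (the MEMORY_MB 'reserved' value in the inventory records further down) as used. A quick check, plain arithmetic rather than Nova code:

    instances = 11
    per_instance = {'MEMORY_MB': 192, 'DISK_GB': 1, 'VCPU': 1}
    reserved_memory_mb = 512  # reported 'reserved' for MEMORY_MB

    used_ram_mb = reserved_memory_mb + instances * per_instance['MEMORY_MB']
    used_disk_gb = instances * per_instance['DISK_GB']
    used_vcpus = instances * per_instance['VCPU']

    assert used_ram_mb == 2624  # matches used_ram=2624MB
    assert used_disk_gb == 11   # matches used_disk=11GB
    assert used_vcpus == 11     # matches used_vcpus=11
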
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.373657] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Reconfigured VM instance instance-0000006c to attach disk [datastore1] bf339484-4b96-4326-b035-39783aff4461/bf339484-4b96-4326-b035-39783aff4461.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1885.374321] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-376dcaa9-0a55-4e1a-aa3a-ef105288d526 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.384082] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1885.384082] env[63241]: value = "task-1821164" [ 1885.384082] env[63241]: _type = "Task" [ 1885.384082] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.394158] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821164, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.459277] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34aa89bc-08c7-4ac2-9d2b-ce85851233b8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.468261] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fac5826-cccb-4e95-8ac6-05b4c507fd01 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.503818] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8839af1b-243b-44ca-a0d8-d1b2bf6ae6ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.506470] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1885.506670] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1885.506851] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleting the datastore file [datastore1] 
e62f49f0-370d-4b5d-ab43-72e0e6238432 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1885.507120] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c51a4482-ecdd-4e8c-a334-f76847e3c0c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.515435] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8efb97-9e9f-4623-b50a-07da14499f7d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.519418] env[63241]: DEBUG oslo_vmware.api [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1885.519418] env[63241]: value = "task-1821165" [ 1885.519418] env[63241]: _type = "Task" [ 1885.519418] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.530535] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1885.537147] env[63241]: DEBUG oslo_vmware.api [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821165, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.663334] env[63241]: DEBUG oslo_vmware.api [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821162, 'name': PowerOffVM_Task, 'duration_secs': 0.339132} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.667224] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1885.667224] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1885.667224] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10a3a2e5-7353-4b03-8fd9-3f7ecbfe2222 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.672759] env[63241]: DEBUG oslo_concurrency.lockutils [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.760822] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1885.761106] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1885.761336] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Deleting the datastore file [datastore1] 14af9f82-525e-453c-8dc5-ef5b13c67ee4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1885.761617] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4153c6a-4795-4b34-9883-61dc79697906 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.768979] env[63241]: DEBUG oslo_vmware.api [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1885.768979] env[63241]: value = "task-1821167" [ 1885.768979] env[63241]: _type = "Task" [ 1885.768979] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.777236] env[63241]: DEBUG oslo_vmware.api [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821167, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.893941] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821164, 'name': Rename_Task, 'duration_secs': 0.197363} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.894288] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1885.894847] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c069ad9-432e-4148-a7d0-9076b7025b4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.902391] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1885.902391] env[63241]: value = "task-1821168" [ 1885.902391] env[63241]: _type = "Task" [ 1885.902391] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.910537] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821168, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.028725] env[63241]: DEBUG oslo_vmware.api [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821165, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149943} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.028987] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1886.029227] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1886.030725] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1886.030725] env[63241]: INFO nova.compute.manager [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1886.030725] env[63241]: DEBUG oslo.service.loopingcall [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1886.030725] env[63241]: DEBUG nova.compute.manager [-] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1886.030941] env[63241]: DEBUG nova.network.neutron [-] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1886.033024] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1886.219684] env[63241]: DEBUG nova.compute.manager [req-7269ec1a-c583-4e44-ad88-345ab4f071d1 req-26ff9e2f-fa33-4fc5-8b40-65a76441edbd service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Received event network-vif-deleted-7a0be842-edfe-48ff-9275-dbb260c7e781 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1886.219948] env[63241]: INFO nova.compute.manager [req-7269ec1a-c583-4e44-ad88-345ab4f071d1 req-26ff9e2f-fa33-4fc5-8b40-65a76441edbd service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Neutron deleted interface 7a0be842-edfe-48ff-9275-dbb260c7e781; detaching it from the instance and deleting it from the info cache [ 1886.220086] env[63241]: DEBUG nova.network.neutron [req-7269ec1a-c583-4e44-ad88-345ab4f071d1 req-26ff9e2f-fa33-4fc5-8b40-65a76441edbd service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.279089] env[63241]: DEBUG oslo_vmware.api [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248799} completed successfully. 
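
The inventory reported for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b above is what the scheduler sizes requests against: for each resource class the schedulable capacity is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request. Worked out for the figures in this record (a plain calculation, not Nova or Placement code):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    capacity = {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
                for rc, inv in inventory.items()}

    assert capacity == {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
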
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.279089] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1886.279089] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1886.279089] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1886.279359] env[63241]: INFO nova.compute.manager [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1886.279582] env[63241]: DEBUG oslo.service.loopingcall [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1886.279790] env[63241]: DEBUG nova.compute.manager [-] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1886.279886] env[63241]: DEBUG nova.network.neutron [-] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1886.288170] env[63241]: DEBUG nova.network.neutron [-] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.413394] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821168, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.538717] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1886.538717] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.284s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.538717] env[63241]: DEBUG oslo_concurrency.lockutils [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.866s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.538717] env[63241]: DEBUG nova.objects.instance [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lazy-loading 'resources' on Instance uuid 6b96988b-cc79-41d7-a17d-277ae5aeb4dc {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1886.575959] env[63241]: DEBUG nova.compute.manager [req-a85a1407-94a8-4d09-9f29-52b54fceb683 req-73f100aa-3d11-4ee4-8fcd-db53b912109f service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Received event network-vif-deleted-f11430c6-423c-4742-b139-54246d031151 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1886.576179] env[63241]: INFO nova.compute.manager [req-a85a1407-94a8-4d09-9f29-52b54fceb683 req-73f100aa-3d11-4ee4-8fcd-db53b912109f service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Neutron deleted interface f11430c6-423c-4742-b139-54246d031151; detaching it from the instance and deleting it from the info cache [ 1886.576358] env[63241]: DEBUG nova.network.neutron [req-a85a1407-94a8-4d09-9f29-52b54fceb683 req-73f100aa-3d11-4ee4-8fcd-db53b912109f service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.724651] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-058e3890-8d2b-464e-828a-2f7f7fdbf354 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.734737] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7709b26c-87b6-4f28-a073-42e08a90f758 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.764770] env[63241]: DEBUG nova.compute.manager [req-7269ec1a-c583-4e44-ad88-345ab4f071d1 req-26ff9e2f-fa33-4fc5-8b40-65a76441edbd service nova] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Detach interface failed, port_id=7a0be842-edfe-48ff-9275-dbb260c7e781, reason: Instance 01af6dc5-e0e7-4f8b-ad07-73af80c32577 could not be found. 
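
The "Acquiring lock" / "acquired ... waited" / "released ... held" lines above are emitted by the oslo_concurrency.lockutils wrapper named in their {{...}} tags: update_usage waited 0.866 s on the "compute_resources" semaphore because _update_available_resource held it for 2.284 s. A minimal sketch of the same serialization pattern; the function bodies are placeholders, not the resource tracker's methods:

    import time

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        time.sleep(2)  # long periodic audit; the lock is "held" for its duration

    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass           # brief per-instance accounting; "waits" if the audit holds the lock
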
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1886.790652] env[63241]: INFO nova.compute.manager [-] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Took 1.76 seconds to deallocate network for instance. [ 1886.914091] env[63241]: DEBUG oslo_vmware.api [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821168, 'name': PowerOnVM_Task, 'duration_secs': 0.653679} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.914091] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1886.914261] env[63241]: INFO nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Took 9.89 seconds to spawn the instance on the hypervisor. [ 1886.914447] env[63241]: DEBUG nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1886.915237] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b61298-5ef0-45bd-9048-aa6b231d667d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.925116] env[63241]: DEBUG nova.network.neutron [-] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.964210] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1886.964475] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1886.965575] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9275487-916f-4656-b2ea-f7f2b0014969 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.983138] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91882b68-d87f-4e5d-8d4f-a6475c2fd637 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.009614] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a/volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1887.009919] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba958954-664c-4d72-ae83-9c6964a5b752 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.028536] env[63241]: DEBUG oslo_vmware.api [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1887.028536] env[63241]: value = "task-1821169" [ 1887.028536] env[63241]: _type = "Task" [ 1887.028536] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.037105] env[63241]: DEBUG oslo_vmware.api [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821169, 'name': ReconfigVM_Task} progress is 5%. 
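
The volume-attach records above also show the shape of the connection_info the VMware driver consumes: driver_volume_type 'vmdk', with data['volume'] apparently referencing the backing ("shadow") VM for the Cinder volume and data['volume_id'] the Cinder volume ID; the attach itself is a ReconfigVM_Task that adds the volume's .vmdk as an extra disk. A trimmed illustration with values copied from the record above (a sketch of the logged dict, not a schema definition):

    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-377212',  # managed object ID of the volume's backing VM
            'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a',
            'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a',
            'access_mode': 'rw',
            'encrypted': False,
        },
        'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a',
    }

    # Datastore path of the disk that ends up attached, as seen in the
    # ReconfigVM_Task record above:
    vmdk_path = ('[datastore1] volume-%(id)s/volume-%(id)s.vmdk'
                 % {'id': connection_info['data']['volume_id']})
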
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.048322] env[63241]: DEBUG nova.network.neutron [-] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1887.080640] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce29b484-4b6d-4bb1-91d4-56a9e5413a3b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.093107] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b067592-27bb-43c9-a09a-9ae20dc6a431 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.124307] env[63241]: DEBUG nova.compute.manager [req-a85a1407-94a8-4d09-9f29-52b54fceb683 req-73f100aa-3d11-4ee4-8fcd-db53b912109f service nova] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Detach interface failed, port_id=f11430c6-423c-4742-b139-54246d031151, reason: Instance 14af9f82-525e-453c-8dc5-ef5b13c67ee4 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1887.197430] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8965539b-42b7-46f0-abf7-668ca2a6ec9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.205347] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fa249d-c4c9-439c-aa90-9c4d7be36baf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.237742] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851f47f1-dc44-459c-9937-d3af834ee6c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.245589] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6220749e-3b02-4d47-acbd-44efe895c602 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.259398] env[63241]: DEBUG nova.compute.provider_tree [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1887.297816] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.428953] env[63241]: INFO nova.compute.manager [-] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Took 1.40 seconds to deallocate network for instance. [ 1887.433099] env[63241]: INFO nova.compute.manager [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Took 16.92 seconds to build instance. [ 1887.538871] env[63241]: DEBUG oslo_vmware.api [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821169, 'name': ReconfigVM_Task, 'duration_secs': 0.403444} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.539221] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfigured VM instance instance-00000066 to attach disk [datastore1] volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a/volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1887.543962] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9f0e242-53d3-4555-9279-28102de742fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.553966] env[63241]: INFO nova.compute.manager [-] [instance: 14af9f82-525e-453c-8dc5-ef5b13c67ee4] Took 1.27 seconds to deallocate network for instance. [ 1887.564885] env[63241]: DEBUG oslo_vmware.api [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1887.564885] env[63241]: value = "task-1821170" [ 1887.564885] env[63241]: _type = "Task" [ 1887.564885] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.573583] env[63241]: DEBUG oslo_vmware.api [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821170, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.780918] env[63241]: ERROR nova.scheduler.client.report [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] [req-ffd3b0b4-1b1e-4dc9-9b04-733f95043c2e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ffd3b0b4-1b1e-4dc9-9b04-733f95043c2e"}]} [ 1887.797430] env[63241]: DEBUG nova.scheduler.client.report [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1887.812035] env[63241]: DEBUG nova.scheduler.client.report [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1887.812300] env[63241]: DEBUG nova.compute.provider_tree [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1887.823247] env[63241]: DEBUG nova.scheduler.client.report [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1887.840581] env[63241]: DEBUG nova.scheduler.client.report [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1887.934978] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a355f8a0-b337-45d6-b8f9-2544babe4108 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "bf339484-4b96-4326-b035-39783aff4461" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.434s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.938081] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.968171] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8886f4b7-16ef-452f-bc30-8db72c78bcdf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.976569] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040ed037-519e-452e-89c9-dbb998847c83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.014742] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141e1802-3358-49f0-8f42-033780d424c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.022835] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250df3ba-ac15-4c60-875f-cfde4b511c1d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.036268] env[63241]: DEBUG nova.compute.provider_tree [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1888.059933] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.074689] env[63241]: DEBUG oslo_vmware.api [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821170, 'name': ReconfigVM_Task, 'duration_secs': 0.133896} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.074986] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1888.246488] env[63241]: DEBUG nova.compute.manager [req-91e301cb-3058-4a7e-852b-4b9687ba311f req-bf5cea2a-22e6-4193-96db-b25988eb50f1 service nova] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Received event network-vif-deleted-a4ff42d0-66c0-4c4b-9fbc-9e13661ba5ec {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1888.549877] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "98e7f776-b36e-4132-803e-f2272e26c44e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.550151] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "98e7f776-b36e-4132-803e-f2272e26c44e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.550365] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "98e7f776-b36e-4132-803e-f2272e26c44e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.550552] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "98e7f776-b36e-4132-803e-f2272e26c44e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.550723] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "98e7f776-b36e-4132-803e-f2272e26c44e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.552842] env[63241]: INFO nova.compute.manager [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Terminating instance [ 1888.554570] env[63241]: DEBUG nova.compute.manager [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1888.554764] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1888.556023] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9697c96b-06b9-4780-bc48-75e362bf422d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.564514] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1888.565129] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-630d4c75-8e9c-44eb-8c28-5c94771ab4c6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.567713] env[63241]: DEBUG nova.scheduler.client.report [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1888.567948] env[63241]: DEBUG nova.compute.provider_tree [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 160 to 161 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1888.568148] env[63241]: DEBUG nova.compute.provider_tree [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1888.572221] env[63241]: DEBUG oslo_vmware.api [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1888.572221] env[63241]: value = "task-1821171" [ 1888.572221] env[63241]: _type = "Task" [ 1888.572221] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.582084] env[63241]: DEBUG oslo_vmware.api [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.614034] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "bf339484-4b96-4326-b035-39783aff4461" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.614658] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "bf339484-4b96-4326-b035-39783aff4461" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.614658] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "bf339484-4b96-4326-b035-39783aff4461-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.614658] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "bf339484-4b96-4326-b035-39783aff4461-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.614853] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "bf339484-4b96-4326-b035-39783aff4461-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.616932] env[63241]: INFO nova.compute.manager [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Terminating instance [ 1888.618747] env[63241]: DEBUG nova.compute.manager [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1888.618992] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1888.619892] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43480567-c63f-49f2-b983-0ee3c1e8ac33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.628174] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1888.628412] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ed75070-8e84-4539-94b6-47a31bdd526a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.635486] env[63241]: DEBUG oslo_vmware.api [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1888.635486] env[63241]: value = "task-1821172" [ 1888.635486] env[63241]: _type = "Task" [ 1888.635486] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.646419] env[63241]: DEBUG oslo_vmware.api [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821172, 'name': PowerOffVM_Task} progress is 0%. 
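
The inventory update above first failed with 409 placement.concurrent_update because the report client's cached resource provider generation was stale; it then refreshed the provider's inventories, aggregates and traits and retried, and Placement accepted the write, moving the generation from 160 to 161. The same optimistic-concurrency loop against the Placement REST API can be sketched with requests; the endpoint paths and payload keys follow the public Placement API, but treat the details as an approximation rather than Nova's report client:

    import requests

    def put_inventories(base_url, headers, rp_uuid, inventories, retries=3):
        """PUT inventories, retrying on a provider generation conflict."""
        for _ in range(retries):
            # Refresh the provider's current generation before writing.
            rp = requests.get('%s/resource_providers/%s' % (base_url, rp_uuid),
                              headers=headers).json()
            resp = requests.put(
                '%s/resource_providers/%s/inventories' % (base_url, rp_uuid),
                headers=headers,
                json={'resource_provider_generation': rp['generation'],
                      'inventories': inventories})
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 "placement.concurrent_update": another writer bumped the
            # generation between our GET and PUT; refresh and try again.
        raise RuntimeError('generation still conflicting after %d attempts'
                           % retries)
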
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.073324] env[63241]: DEBUG oslo_concurrency.lockutils [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.535s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.075691] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.778s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.075922] env[63241]: DEBUG nova.objects.instance [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'resources' on Instance uuid 01af6dc5-e0e7-4f8b-ad07-73af80c32577 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1889.087053] env[63241]: DEBUG oslo_vmware.api [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821171, 'name': PowerOffVM_Task, 'duration_secs': 0.169902} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.087186] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1889.087270] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1889.087511] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-152dbe2c-ff84-417d-ac63-aab3061702d9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.095205] env[63241]: INFO nova.scheduler.client.report [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Deleted allocations for instance 6b96988b-cc79-41d7-a17d-277ae5aeb4dc [ 1889.117996] env[63241]: DEBUG nova.objects.instance [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'flavor' on Instance uuid 779d2380-be6c-4fdb-8755-10e99f8a6fd9 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1889.146287] env[63241]: DEBUG oslo_vmware.api [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: 
{'id': task-1821172, 'name': PowerOffVM_Task, 'duration_secs': 0.18339} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.146641] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1889.146760] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1889.147060] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-839f148f-552f-41d5-86d6-13bea4958f2c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.208406] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1889.208608] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1889.208840] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleting the datastore file [datastore1] 98e7f776-b36e-4132-803e-f2272e26c44e {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1889.209152] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cccddb1-7621-4ba8-85c2-3ee10dc2908d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.215940] env[63241]: DEBUG oslo_vmware.api [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1889.215940] env[63241]: value = "task-1821175" [ 1889.215940] env[63241]: _type = "Task" [ 1889.215940] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.223301] env[63241]: DEBUG oslo_vmware.api [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821175, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.568036] env[63241]: INFO nova.compute.manager [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Rebuilding instance [ 1889.603867] env[63241]: DEBUG oslo_concurrency.lockutils [None req-225dafa6-2475-483c-a78c-5298d2b9e79c tempest-AttachInterfacesTestJSON-1054157255 tempest-AttachInterfacesTestJSON-1054157255-project-member] Lock "6b96988b-cc79-41d7-a17d-277ae5aeb4dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.892s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.606494] env[63241]: DEBUG nova.compute.manager [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1889.607344] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4a766b-61d9-41d6-9f7a-f4ad85da7d0b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.623800] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31b7d21f-d450-48fa-a571-9b192e2aafe7 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.754s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.701095] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26bef4a-03fe-4db0-af8d-2b51f3a0ddf0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.708614] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe3b2f1-83aa-4a66-8d15-330e52275658 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.742924] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67eb3937-932c-4169-85fe-87e8599f0ff7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.752733] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01471dd-6d27-450f-ae6b-e8288229bf87 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.756184] env[63241]: DEBUG oslo_vmware.api [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821175, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.765986] env[63241]: DEBUG nova.compute.provider_tree [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1890.122025] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1890.122025] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8e51284-1228-4877-96a7-6d1fe2eb977f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.129101] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1890.129101] env[63241]: value = "task-1821176" [ 1890.129101] env[63241]: _type = "Task" [ 1890.129101] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.137162] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821176, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.248746] env[63241]: DEBUG oslo_vmware.api [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821175, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.268482] env[63241]: DEBUG nova.scheduler.client.report [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1890.639890] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821176, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.749662] env[63241]: DEBUG oslo_vmware.api [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821175, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.774048] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.698s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.776115] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.838s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.776358] env[63241]: DEBUG nova.objects.instance [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'resources' on Instance uuid e62f49f0-370d-4b5d-ab43-72e0e6238432 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1890.797206] env[63241]: INFO nova.scheduler.client.report [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted allocations for instance 01af6dc5-e0e7-4f8b-ad07-73af80c32577 [ 1890.901047] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1890.901424] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1890.901807] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleting the datastore file [datastore1] bf339484-4b96-4326-b035-39783aff4461 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1890.902225] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb63d657-3da2-4224-8f29-cae8ffefbfdc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.909241] env[63241]: DEBUG oslo_vmware.api [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 
tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1890.909241] env[63241]: value = "task-1821177" [ 1890.909241] env[63241]: _type = "Task" [ 1890.909241] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.918685] env[63241]: DEBUG oslo_vmware.api [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821177, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.143609] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821176, 'name': PowerOffVM_Task, 'duration_secs': 0.956219} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.144645] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1891.201783] env[63241]: INFO nova.compute.manager [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Detaching volume ddd89002-2b7d-4b7a-a368-0b2fe43b975a [ 1891.238923] env[63241]: INFO nova.virt.block_device [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Attempting to driver detach volume ddd89002-2b7d-4b7a-a368-0b2fe43b975a from mountpoint /dev/sdb [ 1891.239253] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1891.239459] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1891.244360] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86cf6ebf-4fda-4c31-a4e0-d1ddd0213fc3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.251230] env[63241]: DEBUG oslo_vmware.api [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.846293} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.266605] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.266822] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1891.267010] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1891.267263] env[63241]: INFO nova.compute.manager [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Took 2.71 seconds to destroy the instance on the hypervisor. [ 1891.267472] env[63241]: DEBUG oslo.service.loopingcall [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.267748] env[63241]: DEBUG nova.compute.manager [-] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1891.268046] env[63241]: DEBUG nova.network.neutron [-] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1891.270077] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30faa6e-62ca-4bac-b091-4b18eca8ceae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.277774] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9303c045-817e-45a3-8f8e-a8dd70107eb5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.307383] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57302b7-3f43-49a0-a8c3-ea60d24ce4d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.312266] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1917a437-e494-4a72-b8f4-5e15accb24f9 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "01af6dc5-e0e7-4f8b-ad07-73af80c32577" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.500s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.328586] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] The volume has not been displaced from its original location: [datastore1] volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a/volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a.vmdk. No consolidation needed. {{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1891.334829] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfiguring VM instance instance-00000066 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1891.339991] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-210827d2-ae30-416b-a640-8e035a483375 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.356563] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1891.356563] env[63241]: value = "task-1821178" [ 1891.356563] env[63241]: _type = "Task" [ 1891.356563] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.365533] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821178, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.421789] env[63241]: DEBUG oslo_vmware.api [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165474} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.421991] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.422197] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1891.422788] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1891.422788] env[63241]: INFO nova.compute.manager [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: bf339484-4b96-4326-b035-39783aff4461] Took 2.80 seconds to destroy the instance on the hypervisor. [ 1891.422904] env[63241]: DEBUG oslo.service.loopingcall [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.423213] env[63241]: DEBUG nova.compute.manager [-] [instance: bf339484-4b96-4326-b035-39783aff4461] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1891.423213] env[63241]: DEBUG nova.network.neutron [-] [instance: bf339484-4b96-4326-b035-39783aff4461] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1891.471019] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f2914e-5bfb-47ee-98dc-1a0a12b49ad7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.480266] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b054a3-8464-4887-9ed7-a55cf50a5922 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.516555] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd7f7fc-857e-4880-bfe1-45ba9c38698c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.525788] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b503ec31-4bbe-4432-9fbd-9a1b546b9449 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.540489] env[63241]: DEBUG nova.compute.provider_tree [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1891.622775] env[63241]: DEBUG nova.compute.manager [req-504eebbe-dc8f-4bdf-8dc5-5e5784b4b1e0 req-0e8e2677-730a-4489-a2ec-75ae704b0487 service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Received event network-vif-deleted-58c509e6-4c6b-4a29-9906-ff3258989040 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1891.623133] env[63241]: INFO nova.compute.manager [req-504eebbe-dc8f-4bdf-8dc5-5e5784b4b1e0 req-0e8e2677-730a-4489-a2ec-75ae704b0487 service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Neutron deleted interface 58c509e6-4c6b-4a29-9906-ff3258989040; detaching it from the instance and deleting it from the info cache [ 1891.623434] env[63241]: DEBUG nova.network.neutron [req-504eebbe-dc8f-4bdf-8dc5-5e5784b4b1e0 req-0e8e2677-730a-4489-a2ec-75ae704b0487 service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.834443] env[63241]: DEBUG nova.compute.manager [req-0f05ed96-9020-4da0-9f59-2e87adbe4870 req-70116c54-b69e-490e-9611-4131f783707b service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Received event network-vif-deleted-5c215984-3f1e-41db-966c-17d3c097a862 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1891.834691] env[63241]: INFO nova.compute.manager [req-0f05ed96-9020-4da0-9f59-2e87adbe4870 req-70116c54-b69e-490e-9611-4131f783707b 
service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Neutron deleted interface 5c215984-3f1e-41db-966c-17d3c097a862; detaching it from the instance and deleting it from the info cache [ 1891.834892] env[63241]: DEBUG nova.network.neutron [req-0f05ed96-9020-4da0-9f59-2e87adbe4870 req-70116c54-b69e-490e-9611-4131f783707b service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.866234] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821178, 'name': ReconfigVM_Task, 'duration_secs': 0.393133} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.866536] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfigured VM instance instance-00000066 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1891.871622] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19fc2a10-2793-4966-9909-dd2f069863ba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.886242] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1891.886242] env[63241]: value = "task-1821179" [ 1891.886242] env[63241]: _type = "Task" [ 1891.886242] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.900442] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821179, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.044608] env[63241]: DEBUG nova.scheduler.client.report [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1892.090341] env[63241]: DEBUG nova.network.neutron [-] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.125713] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0555ae2-07e4-4223-9e0d-77a7ae383049 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.136244] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f83910e-361e-424c-aea9-244534a19a34 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.163944] env[63241]: DEBUG nova.compute.manager [req-504eebbe-dc8f-4bdf-8dc5-5e5784b4b1e0 req-0e8e2677-730a-4489-a2ec-75ae704b0487 service nova] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Detach interface failed, port_id=58c509e6-4c6b-4a29-9906-ff3258989040, reason: Instance 98e7f776-b36e-4132-803e-f2272e26c44e could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1892.310038] env[63241]: DEBUG nova.network.neutron [-] [instance: bf339484-4b96-4326-b035-39783aff4461] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.338075] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8879aef-7849-42a3-a82f-439609a7a500 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.348256] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b2eb4e-ec4d-450c-8ad5-ce9886e5a613 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.380597] env[63241]: DEBUG nova.compute.manager [req-0f05ed96-9020-4da0-9f59-2e87adbe4870 req-70116c54-b69e-490e-9611-4131f783707b service nova] [instance: bf339484-4b96-4326-b035-39783aff4461] Detach interface failed, port_id=5c215984-3f1e-41db-966c-17d3c097a862, reason: Instance bf339484-4b96-4326-b035-39783aff4461 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1892.396835] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821179, 'name': ReconfigVM_Task, 'duration_secs': 0.131354} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.397860] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1892.549670] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.773s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.551869] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.492s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.551869] env[63241]: DEBUG nova.objects.instance [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lazy-loading 'resources' on Instance uuid 14af9f82-525e-453c-8dc5-ef5b13c67ee4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1892.574753] env[63241]: INFO nova.scheduler.client.report [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleted allocations for instance e62f49f0-370d-4b5d-ab43-72e0e6238432 [ 1892.592604] env[63241]: INFO nova.compute.manager [-] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Took 1.32 seconds to deallocate network for instance. [ 1892.813938] env[63241]: INFO nova.compute.manager [-] [instance: bf339484-4b96-4326-b035-39783aff4461] Took 1.39 seconds to deallocate network for instance. 
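The "Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {...}" entries above repeat the full Placement inventory record this compute node reports. As a rough guide to reading those dicts, the short Python sketch below (illustrative only, not taken from the log or from Nova's source) applies the standard Placement capacity rule, capacity = (total - reserved) * allocation_ratio, to the reported figures.

# Illustrative helper: derive the effective capacity Placement enforces for
# each resource class, using the inventory dict reported in the log above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # Placement accepts allocations while used + requested stays within
    # (total - reserved) * allocation_ratio; min_unit/max_unit/step_size
    # additionally constrain each individual allocation.
    return {rc: int((f['total'] - f['reserved']) * f['allocation_ratio'])
            for rc, f in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

With the values reported here that works out to 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk.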
[ 1893.082193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c4f149f5-ff48-4913-b83e-a130a9539af0 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "e62f49f0-370d-4b5d-ab43-72e0e6238432" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.323s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.100088] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.161941] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f6f23d-a385-4767-9001-3a10d7157927 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.169992] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49dccc2-ca3f-4462-9f16-b8dd32ccd299 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.200286] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cd979a-7782-4434-9cce-32bdcd5349db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.207370] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f67185e-dfd6-4180-8228-01fac6e4074d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.221342] env[63241]: DEBUG nova.compute.provider_tree [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1893.321811] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.477926] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1893.478361] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13fcce9e-926b-4696-b701-6374e51997bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.486128] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 
tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1893.486128] env[63241]: value = "task-1821180" [ 1893.486128] env[63241]: _type = "Task" [ 1893.486128] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.493786] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821180, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.602158] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.604862] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.726282] env[63241]: DEBUG nova.scheduler.client.report [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1894.004560] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1894.005130] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1894.005478] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1894.007165] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600bf22a-d79b-4c56-a774-e567ad3b2abb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.043812] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1465cbbc-f1d1-4aad-959a-2f0c5ae1e589 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.053414] env[63241]: WARNING nova.virt.vmwareapi.driver [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1894.053882] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1894.056274] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd15896-1dcb-428f-aa70-b18b86d97013 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.064693] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1894.065176] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7078f8e5-15f1-413c-a33e-d045b313c7c7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.110360] env[63241]: DEBUG nova.compute.manager [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1894.232256] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.234863] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.135s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.235338] env[63241]: DEBUG nova.objects.instance [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lazy-loading 'resources' on Instance uuid 98e7f776-b36e-4132-803e-f2272e26c44e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1894.271996] env[63241]: INFO nova.scheduler.client.report [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Deleted allocations for instance 14af9f82-525e-453c-8dc5-ef5b13c67ee4 [ 1894.636464] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.780551] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5a3d7ab1-0110-4ab9-948e-b70ae366dc31 tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "14af9f82-525e-453c-8dc5-ef5b13c67ee4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.648s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.854839] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5427ac74-dbf6-40d6-a5cc-fa1650e61532 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.864050] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af9a2a0-cbca-4e7c-a2eb-bfe1d58e9fc0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.898847] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c0e7b4-9cbf-496a-99a8-386d66ed0188 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.906850] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365e326c-11a4-4ec6-ae01-7713530b78d0 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.922843] env[63241]: DEBUG nova.compute.provider_tree [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.982254] env[63241]: DEBUG oslo_concurrency.lockutils [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "fb5d60fa-fa13-44a1-8291-4645761a0c80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.982670] env[63241]: DEBUG oslo_concurrency.lockutils [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.983016] env[63241]: DEBUG oslo_concurrency.lockutils [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "fb5d60fa-fa13-44a1-8291-4645761a0c80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.983344] env[63241]: DEBUG oslo_concurrency.lockutils [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.983624] env[63241]: DEBUG oslo_concurrency.lockutils [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.987405] env[63241]: INFO nova.compute.manager [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Terminating instance [ 1894.990530] env[63241]: DEBUG nova.compute.manager [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1894.990830] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1894.992153] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b128d3c1-6bbe-4b81-aec4-06994ff513b8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.003793] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1895.004164] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b61649b7-4cce-48c2-9012-f48cb3fe268d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.011819] env[63241]: DEBUG oslo_vmware.api [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1895.011819] env[63241]: value = "task-1821182" [ 1895.011819] env[63241]: _type = "Task" [ 1895.011819] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.023717] env[63241]: DEBUG oslo_vmware.api [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821182, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.425998] env[63241]: DEBUG nova.scheduler.client.report [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1895.521364] env[63241]: DEBUG oslo_vmware.api [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821182, 'name': PowerOffVM_Task, 'duration_secs': 0.224612} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.521487] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1895.521658] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1895.521906] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91e5e17a-a598-4934-86bb-680823944fb8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.560256] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "26b75825-49c4-4870-957a-a2a76a970880" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.560517] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.930896] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.696s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.934237] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.612s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.934237] env[63241]: DEBUG nova.objects.instance [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lazy-loading 'resources' on Instance uuid bf339484-4b96-4326-b035-39783aff4461 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1895.967303] env[63241]: INFO nova.scheduler.client.report [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleted allocations for instance 98e7f776-b36e-4132-803e-f2272e26c44e [ 
1896.063525] env[63241]: DEBUG nova.compute.manager [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1896.475094] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9d4653ef-d305-4cca-bef4-6d6599908a96 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "98e7f776-b36e-4132-803e-f2272e26c44e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.925s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.564781] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2f4706-9357-4400-8c9c-142739c2f1a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.577428] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d171e7-2ed4-4296-94a6-7bc26c219e27 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.609783] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.610649] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c114d4-b027-498f-9707-31e62b3295d8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.618242] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f094547b-9abf-47b5-bc5f-6540f7c59d71 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.633576] env[63241]: DEBUG nova.compute.provider_tree [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1896.639217] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.639463] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.134869] env[63241]: DEBUG nova.scheduler.client.report [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1897.141514] env[63241]: DEBUG nova.compute.manager [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1897.639173] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.641526] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.005s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.644187] env[63241]: INFO nova.compute.claims [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1897.662412] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.669659] env[63241]: INFO nova.scheduler.client.report [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleted allocations for instance bf339484-4b96-4326-b035-39783aff4461 [ 1898.183847] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bffae6c-802c-4184-981d-58fccb05d613 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "bf339484-4b96-4326-b035-39783aff4461" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.569s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.762744] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af17b72d-7677-4047-8009-b4bf89eb2be4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.769668] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "215f658f-2af6-4525-b94c-489ad794e6f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.769892] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "215f658f-2af6-4525-b94c-489ad794e6f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.774156] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a00405-939b-485f-9f92-c010f2199b2a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.806436] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0bb3e3-437c-4e8a-a574-a507377dc0af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.815027] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52025429-6a0a-4e93-9ce4-1f9864a37b0e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.831078] env[63241]: DEBUG nova.compute.provider_tree [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1899.232721] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1899.233022] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1899.233243] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Deleting the datastore file [datastore1] 
fb5d60fa-fa13-44a1-8291-4645761a0c80 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1899.233523] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f4f6e0e-17b7-481c-b185-d84428d23685 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.240499] env[63241]: DEBUG oslo_vmware.api [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for the task: (returnval){ [ 1899.240499] env[63241]: value = "task-1821184" [ 1899.240499] env[63241]: _type = "Task" [ 1899.240499] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.248331] env[63241]: DEBUG oslo_vmware.api [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821184, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.271941] env[63241]: DEBUG nova.compute.manager [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1899.280009] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.280277] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.310184] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.310453] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.334256] env[63241]: DEBUG 
nova.scheduler.client.report [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1899.734914] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1899.734914] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1899.735184] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleting the datastore file [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1899.735360] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1634870-b1db-4b34-9a64-5fbac24fafe1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.745989] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1899.745989] env[63241]: value = "task-1821185" [ 1899.745989] env[63241]: _type = "Task" [ 1899.745989] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.752541] env[63241]: DEBUG oslo_vmware.api [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821184, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.757173] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821185, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.782569] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1899.796876] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.813524] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1899.838915] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.197s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.839490] env[63241]: DEBUG nova.compute.manager [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1899.842642] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.233s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.844670] env[63241]: INFO nova.compute.claims [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1900.254794] env[63241]: DEBUG oslo_vmware.api [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Task: {'id': task-1821184, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.625198} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.258558] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1900.258846] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1900.259121] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1900.259323] env[63241]: INFO nova.compute.manager [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Took 5.27 seconds to destroy the instance on the hypervisor. [ 1900.259590] env[63241]: DEBUG oslo.service.loopingcall [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1900.259868] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144179} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.260135] env[63241]: DEBUG nova.compute.manager [-] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1900.260251] env[63241]: DEBUG nova.network.neutron [-] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1900.261829] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1900.262027] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1900.262205] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1900.303298] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.331841] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.348818] env[63241]: DEBUG nova.compute.utils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1900.353408] env[63241]: DEBUG nova.compute.manager [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1900.353507] env[63241]: DEBUG nova.network.neutron [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1900.392467] env[63241]: DEBUG nova.policy [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb9af037642e4044826d210ea26affee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d257d51a2254f5386fd3348602e5b71', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1900.673269] env[63241]: DEBUG nova.network.neutron [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Successfully created port: 249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1900.701049] env[63241]: DEBUG nova.compute.manager [req-aaf238de-e968-4c5a-b2f2-2b253f75b062 req-e86410ac-12ce-4e40-acd8-56184beaee29 service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Received event network-vif-deleted-e025d87f-adf8-4be9-91fa-85161ae568cf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1900.701049] env[63241]: INFO nova.compute.manager [req-aaf238de-e968-4c5a-b2f2-2b253f75b062 req-e86410ac-12ce-4e40-acd8-56184beaee29 service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Neutron deleted interface e025d87f-adf8-4be9-91fa-85161ae568cf; detaching it from the instance and deleting it from the info cache [ 1900.701049] env[63241]: DEBUG nova.network.neutron [req-aaf238de-e968-4c5a-b2f2-2b253f75b062 req-e86410ac-12ce-4e40-acd8-56184beaee29 service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.767192] env[63241]: INFO nova.virt.block_device [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Booting with volume ddd89002-2b7d-4b7a-a368-0b2fe43b975a at /dev/sdb [ 1900.802995] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e3d64fd-412b-4b03-bb90-d438cdc36ff5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.813134] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cce005-a00a-4252-affc-6ad43de671ce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.839522] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
SearchIndex.FindAllByUuid with opID=oslo.vmware-b542c13f-a665-4577-9b57-716f6258a6aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.847872] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc084ae7-dccd-46d7-bcfd-7c0cc883a1f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.858904] env[63241]: DEBUG nova.compute.manager [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1900.882205] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd01a976-6af8-4058-ad8d-3648160e4133 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.890620] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108ebf09-e8b8-4f6e-b68d-22772451e2d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.906773] env[63241]: DEBUG nova.virt.block_device [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Updating existing volume attachment record: f066b59a-2ef4-49fc-ab25-987f089328aa {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1901.028280] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435ae56c-11fe-4905-a2f4-834143eaf435 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.035912] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ac7161-6c89-4fa0-a3ca-630e82b0a6e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.067859] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3258d4d-fe0a-48cf-9356-64b8c9c1bebe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.075718] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae3b1b2-77ad-4a76-bafb-3ff71f1467a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.089141] env[63241]: DEBUG nova.compute.provider_tree [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1901.177045] env[63241]: DEBUG nova.network.neutron [-] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1901.203029] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34f2ca81-e9fe-43e4-8221-8ca2ec7040f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.212464] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f8b0ce-f333-471f-be43-c64e23a16d9f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.238146] env[63241]: DEBUG nova.compute.manager [req-aaf238de-e968-4c5a-b2f2-2b253f75b062 req-e86410ac-12ce-4e40-acd8-56184beaee29 service nova] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Detach interface failed, port_id=e025d87f-adf8-4be9-91fa-85161ae568cf, reason: Instance fb5d60fa-fa13-44a1-8291-4645761a0c80 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1901.592901] env[63241]: DEBUG nova.scheduler.client.report [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1901.680054] env[63241]: INFO nova.compute.manager [-] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Took 1.42 seconds to deallocate network for instance. [ 1901.869342] env[63241]: DEBUG nova.compute.manager [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1901.894391] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1901.894656] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1901.894814] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1901.895008] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1901.895163] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1901.895316] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1901.895525] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1901.895687] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
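The nova.virt.hardware records around this point trace CPU-topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits, so sockets, cores and threads are each capped at the default 65536, and the only factorization is 1 socket x 1 core x 1 thread. A minimal sketch of that enumeration step follows, assuming a simple brute-force factorization; it is an illustration only, not Nova's actual _get_possible_cpu_topologies code, and the function name and defaults here are hypothetical.

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus.

    Illustrative only: for vcpus=1 with the default limits the single
    result is VirtCPUTopology(sockets=1, cores=1, threads=1), matching
    the "Build topologies for 1 vcpu(s) 1:1:1" trace in this log.
    """
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue  # sockets must divide the vCPU count evenly
        remaining = vcpus // sockets
        for cores in range(1, min(remaining, max_cores) + 1):
            if remaining % cores:
                continue  # cores must divide what is left after sockets
            threads = remaining // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_cpu_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]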
[ 1901.895879] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1901.896073] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1901.896281] env[63241]: DEBUG nova.virt.hardware [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1901.897127] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4bd629e-c123-44de-8f07-3e2f719cedda {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.905237] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0972cc-2678-4cfa-8651-a8acd5ea27a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.054071] env[63241]: DEBUG nova.compute.manager [req-efc23f44-6161-4308-ba89-560dae3eef36 req-bb9ece81-4e4c-4d73-9c91-0b8d69930a26 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received event network-vif-plugged-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1902.054372] env[63241]: DEBUG oslo_concurrency.lockutils [req-efc23f44-6161-4308-ba89-560dae3eef36 req-bb9ece81-4e4c-4d73-9c91-0b8d69930a26 service nova] Acquiring lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.054628] env[63241]: DEBUG oslo_concurrency.lockutils [req-efc23f44-6161-4308-ba89-560dae3eef36 req-bb9ece81-4e4c-4d73-9c91-0b8d69930a26 service nova] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.054832] env[63241]: DEBUG oslo_concurrency.lockutils [req-efc23f44-6161-4308-ba89-560dae3eef36 req-bb9ece81-4e4c-4d73-9c91-0b8d69930a26 service nova] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.055078] env[63241]: DEBUG nova.compute.manager [req-efc23f44-6161-4308-ba89-560dae3eef36 req-bb9ece81-4e4c-4d73-9c91-0b8d69930a26 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] No waiting events found dispatching network-vif-plugged-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1902.055292] 
env[63241]: WARNING nova.compute.manager [req-efc23f44-6161-4308-ba89-560dae3eef36 req-bb9ece81-4e4c-4d73-9c91-0b8d69930a26 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received unexpected event network-vif-plugged-249e56d5-0dc5-4bab-9179-ca69f7024104 for instance with vm_state building and task_state spawning. [ 1902.098596] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.256s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.099089] env[63241]: DEBUG nova.compute.manager [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1902.101903] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.439s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.103475] env[63241]: INFO nova.compute.claims [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1902.141322] env[63241]: DEBUG nova.network.neutron [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Successfully updated port: 249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1902.186482] env[63241]: DEBUG oslo_concurrency.lockutils [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.608608] env[63241]: DEBUG nova.compute.utils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1902.611876] env[63241]: DEBUG nova.compute.manager [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1902.612066] env[63241]: DEBUG nova.network.neutron [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1902.643827] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.643959] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.644141] env[63241]: DEBUG nova.network.neutron [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1902.661830] env[63241]: DEBUG nova.policy [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac0c578d40af405b8fe206fcd309cf0a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6c76b46a4cf4a32a4a1c25fb81a963d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1902.912263] env[63241]: DEBUG nova.network.neutron [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Successfully created port: 139ab667-6231-4030-a733-172ac1488ddf {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1903.050289] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1903.050574] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1903.050734] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1903.050917] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1903.051077] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1903.051229] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1903.051436] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1903.051633] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1903.051851] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1903.052048] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 
1903.052231] env[63241]: DEBUG nova.virt.hardware [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1903.053163] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61bd060-afa3-47ec-b04d-32ab989f23ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.061216] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d9b746-229e-4c28-ab7a-788d7bbefe5c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.074787] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:f8:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b434fe5e-e77d-4974-8bd4-7226a359e28d', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1903.082382] env[63241]: DEBUG oslo.service.loopingcall [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.082604] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1903.083182] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72754bd7-87ec-40d1-83fb-b4c9f21d5d88 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.102347] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1903.102347] env[63241]: value = "task-1821186" [ 1903.102347] env[63241]: _type = "Task" [ 1903.102347] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.110870] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821186, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.117860] env[63241]: DEBUG nova.compute.manager [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1903.177399] env[63241]: DEBUG nova.network.neutron [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1903.265860] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980c22cf-cf4b-43f9-b00f-bbd3559da8de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.273874] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0799f4e-0e31-4d55-8f68-90e33fda2889 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.305989] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2963bd-605c-4b4d-878b-4c8c660373dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.313754] env[63241]: DEBUG nova.network.neutron [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating instance_info_cache with network_info: [{"id": "249e56d5-0dc5-4bab-9179-ca69f7024104", "address": "fa:16:3e:85:eb:39", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249e56d5-0d", "ovs_interfaceid": "249e56d5-0dc5-4bab-9179-ca69f7024104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.316044] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3831e392-fe62-4043-ae0b-c729983c7610 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.330286] env[63241]: DEBUG nova.compute.provider_tree [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1903.612541] env[63241]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-1821186, 'name': CreateVM_Task, 'duration_secs': 0.371081} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.612741] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1903.613467] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.613636] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.613957] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1903.614231] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dcc5498-548a-42b6-9217-4111e55f76f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.618869] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1903.618869] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d13f59-0573-4fe7-13f3-a23e7d921b09" [ 1903.618869] env[63241]: _type = "Task" [ 1903.618869] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.630651] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d13f59-0573-4fe7-13f3-a23e7d921b09, 'name': SearchDatastore_Task, 'duration_secs': 0.00931} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.630948] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.631211] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1903.631455] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.631622] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.631854] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1903.632161] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9af8866-17b5-44d7-9702-e85e9b585fc5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.639674] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1903.639867] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1903.640589] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6393f180-0d04-4d32-9b33-ff6b11eaa187 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.645605] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1903.645605] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a81cd4-b91e-a460-7706-1adab25c73bd" [ 1903.645605] env[63241]: _type = "Task" [ 1903.645605] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.654566] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a81cd4-b91e-a460-7706-1adab25c73bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.821426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.821813] env[63241]: DEBUG nova.compute.manager [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Instance network_info: |[{"id": "249e56d5-0dc5-4bab-9179-ca69f7024104", "address": "fa:16:3e:85:eb:39", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249e56d5-0d", "ovs_interfaceid": "249e56d5-0dc5-4bab-9179-ca69f7024104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1903.822272] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:eb:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '249e56d5-0dc5-4bab-9179-ca69f7024104', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1903.830321] env[63241]: DEBUG oslo.service.loopingcall [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.830564] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1903.831217] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43e2db2c-12f7-4e11-96cc-c417f339b39f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.845728] env[63241]: DEBUG nova.scheduler.client.report [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1903.855166] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1903.855166] env[63241]: value = "task-1821187" [ 1903.855166] env[63241]: _type = "Task" [ 1903.855166] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.863476] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821187, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.079458] env[63241]: DEBUG nova.compute.manager [req-87e54a30-40c7-457e-a79b-f25124817e60 req-14be2eef-e4d1-40e5-8e5b-36a2f3cea211 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received event network-changed-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1904.079734] env[63241]: DEBUG nova.compute.manager [req-87e54a30-40c7-457e-a79b-f25124817e60 req-14be2eef-e4d1-40e5-8e5b-36a2f3cea211 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Refreshing instance network info cache due to event network-changed-249e56d5-0dc5-4bab-9179-ca69f7024104. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1904.079986] env[63241]: DEBUG oslo_concurrency.lockutils [req-87e54a30-40c7-457e-a79b-f25124817e60 req-14be2eef-e4d1-40e5-8e5b-36a2f3cea211 service nova] Acquiring lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.080344] env[63241]: DEBUG oslo_concurrency.lockutils [req-87e54a30-40c7-457e-a79b-f25124817e60 req-14be2eef-e4d1-40e5-8e5b-36a2f3cea211 service nova] Acquired lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.080535] env[63241]: DEBUG nova.network.neutron [req-87e54a30-40c7-457e-a79b-f25124817e60 req-14be2eef-e4d1-40e5-8e5b-36a2f3cea211 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Refreshing network info cache for port 249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1904.128554] env[63241]: DEBUG nova.compute.manager [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1904.158528] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a81cd4-b91e-a460-7706-1adab25c73bd, 'name': SearchDatastore_Task, 'duration_secs': 0.008641} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.160612] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1904.160834] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1904.160994] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1904.161194] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1904.161341] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1904.161487] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1904.161688] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1904.161881] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1904.162068] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1904.162237] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1904.162411] env[63241]: DEBUG nova.virt.hardware [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1904.163693] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c27218-b540-4111-b36c-a3e0aeb4b004 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.166505] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8780f291-e16f-400c-81f0-359d0683b436 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.173852] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1904.173852] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5205b8b2-a5cb-d7b8-2ca4-2cad2a9874f3" [ 1904.173852] env[63241]: _type = "Task" [ 1904.173852] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.179477] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692ac39d-e9f2-4d08-91ec-05c3f777f42e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.189268] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5205b8b2-a5cb-d7b8-2ca4-2cad2a9874f3, 'name': SearchDatastore_Task, 'duration_secs': 0.010521} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.196727] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.196984] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9/779d2380-be6c-4fdb-8755-10e99f8a6fd9.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1904.197419] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc18c5b2-c1c1-4a73-ab11-d7ece2082d92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.204224] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1904.204224] env[63241]: value = "task-1821188" [ 1904.204224] env[63241]: _type = "Task" [ 1904.204224] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.213197] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821188, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.351219] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.352390] env[63241]: DEBUG nova.compute.manager [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1904.356681] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.560s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.358418] env[63241]: INFO nova.compute.claims [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1904.375140] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821187, 'name': CreateVM_Task, 'duration_secs': 0.386797} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.376103] env[63241]: DEBUG nova.network.neutron [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Successfully updated port: 139ab667-6231-4030-a733-172ac1488ddf {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1904.377417] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1904.378176] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.378821] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.378821] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1904.379118] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f379aa-820b-4ecc-bdaf-5ae7f9d44c5f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.384944] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1904.384944] env[63241]: value = 
"session[52622af6-969a-a161-ff87-4f4559b12465]524254fc-6049-d7a7-a851-f0f3a7c9768b" [ 1904.384944] env[63241]: _type = "Task" [ 1904.384944] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.394990] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524254fc-6049-d7a7-a851-f0f3a7c9768b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.652416] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.652755] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.716407] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821188, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.814133] env[63241]: DEBUG nova.network.neutron [req-87e54a30-40c7-457e-a79b-f25124817e60 req-14be2eef-e4d1-40e5-8e5b-36a2f3cea211 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updated VIF entry in instance network info cache for port 249e56d5-0dc5-4bab-9179-ca69f7024104. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1904.814541] env[63241]: DEBUG nova.network.neutron [req-87e54a30-40c7-457e-a79b-f25124817e60 req-14be2eef-e4d1-40e5-8e5b-36a2f3cea211 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating instance_info_cache with network_info: [{"id": "249e56d5-0dc5-4bab-9179-ca69f7024104", "address": "fa:16:3e:85:eb:39", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249e56d5-0d", "ovs_interfaceid": "249e56d5-0dc5-4bab-9179-ca69f7024104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.863217] env[63241]: DEBUG nova.compute.utils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1904.867435] env[63241]: DEBUG nova.compute.manager [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1904.867610] env[63241]: DEBUG nova.network.neutron [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1904.882445] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "refresh_cache-26b75825-49c4-4870-957a-a2a76a970880" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.882586] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "refresh_cache-26b75825-49c4-4870-957a-a2a76a970880" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.882730] env[63241]: DEBUG nova.network.neutron [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1904.895372] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524254fc-6049-d7a7-a851-f0f3a7c9768b, 'name': SearchDatastore_Task, 'duration_secs': 0.109081} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.895644] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.895859] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1904.896103] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.896324] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.897726] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1904.897726] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-219bdc5d-2789-4fa1-aab2-3c49348d7f63 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.904943] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1904.905126] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1904.905825] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94ed396c-f59f-4c00-b308-cfe1eb14707f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.909180] env[63241]: DEBUG nova.policy [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c54558668b9d43bd9adc17fce71df03e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f7af6cf881f84203a7f0a546466bf76f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1904.914164] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1904.914164] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b84783-d210-a60f-db96-312b0b676b5e" [ 1904.914164] env[63241]: _type = "Task" [ 1904.914164] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.921209] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b84783-d210-a60f-db96-312b0b676b5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.140749] env[63241]: DEBUG nova.network.neutron [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Successfully created port: e59b20e5-cfbf-45bb-beb1-675a18f1cb97 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1905.156387] env[63241]: DEBUG nova.compute.utils [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1905.215677] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821188, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.62438} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.215942] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9/779d2380-be6c-4fdb-8755-10e99f8a6fd9.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1905.216252] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1905.216499] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3068ec6-3d21-4882-ba1d-71694c05ca41 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.222469] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1905.222469] env[63241]: value = "task-1821189" [ 1905.222469] env[63241]: _type = "Task" [ 1905.222469] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.231210] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821189, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.317546] env[63241]: DEBUG oslo_concurrency.lockutils [req-87e54a30-40c7-457e-a79b-f25124817e60 req-14be2eef-e4d1-40e5-8e5b-36a2f3cea211 service nova] Releasing lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.371157] env[63241]: DEBUG nova.compute.manager [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1905.417894] env[63241]: DEBUG nova.network.neutron [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1905.430194] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b84783-d210-a60f-db96-312b0b676b5e, 'name': SearchDatastore_Task, 'duration_secs': 0.009425} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.431071] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9dff51f-32c8-4fa7-a04e-fa94a52902d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.439684] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1905.439684] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52fae662-901f-5b84-63bd-6e43f69162b6" [ 1905.439684] env[63241]: _type = "Task" [ 1905.439684] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.447228] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fae662-901f-5b84-63bd-6e43f69162b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.513878] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14946016-6284-4240-af5a-63a5fe466535 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.523234] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64871323-f342-4e4e-a672-cc3e85ac1fca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.554836] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338122a2-ee6c-48eb-8208-b0634879ec5d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.561806] env[63241]: DEBUG nova.network.neutron [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Updating instance_info_cache with network_info: [{"id": "139ab667-6231-4030-a733-172ac1488ddf", "address": "fa:16:3e:a5:50:94", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap139ab667-62", "ovs_interfaceid": "139ab667-6231-4030-a733-172ac1488ddf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.563973] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae7bd38-246a-49f2-b613-3f9cf2423056 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.578803] env[63241]: DEBUG nova.compute.provider_tree [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1905.658837] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.732775] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821189, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061354} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.732908] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1905.733700] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e68559-2206-4b42-8e03-b85e93d738ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.755582] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9/779d2380-be6c-4fdb-8755-10e99f8a6fd9.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1905.755837] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec13ba33-1717-4721-b38f-ab882d5b3185 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.776602] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1905.776602] env[63241]: value = "task-1821190" [ 1905.776602] env[63241]: _type = "Task" [ 1905.776602] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.784440] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821190, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.951400] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fae662-901f-5b84-63bd-6e43f69162b6, 'name': SearchDatastore_Task, 'duration_secs': 0.010755} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.951623] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.951916] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c/e4514260-dfcc-45a3-80d5-b5484b0b599c.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1905.952196] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d3a939e-bd5f-4b22-b7a2-b98dbfdbe275 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.959340] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1905.959340] env[63241]: value = "task-1821191" [ 1905.959340] env[63241]: _type = "Task" [ 1905.959340] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.966274] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821191, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.069179] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "refresh_cache-26b75825-49c4-4870-957a-a2a76a970880" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.069593] env[63241]: DEBUG nova.compute.manager [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Instance network_info: |[{"id": "139ab667-6231-4030-a733-172ac1488ddf", "address": "fa:16:3e:a5:50:94", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap139ab667-62", "ovs_interfaceid": "139ab667-6231-4030-a733-172ac1488ddf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1906.070496] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:50:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '139ab667-6231-4030-a733-172ac1488ddf', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1906.078043] env[63241]: DEBUG oslo.service.loopingcall [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1906.078295] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1906.078521] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aeec7c6d-80d8-4c29-b2ea-81a9aae8a5d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.093319] env[63241]: DEBUG nova.scheduler.client.report [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1906.101879] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1906.101879] env[63241]: value = "task-1821192" [ 1906.101879] env[63241]: _type = "Task" [ 1906.101879] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.109523] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821192, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.224795] env[63241]: DEBUG nova.compute.manager [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Received event network-vif-plugged-139ab667-6231-4030-a733-172ac1488ddf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1906.225040] env[63241]: DEBUG oslo_concurrency.lockutils [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] Acquiring lock "26b75825-49c4-4870-957a-a2a76a970880-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.225261] env[63241]: DEBUG oslo_concurrency.lockutils [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] Lock "26b75825-49c4-4870-957a-a2a76a970880-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.225408] env[63241]: DEBUG oslo_concurrency.lockutils [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] Lock "26b75825-49c4-4870-957a-a2a76a970880-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.225578] env[63241]: DEBUG nova.compute.manager [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff 
req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] No waiting events found dispatching network-vif-plugged-139ab667-6231-4030-a733-172ac1488ddf {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1906.225743] env[63241]: WARNING nova.compute.manager [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Received unexpected event network-vif-plugged-139ab667-6231-4030-a733-172ac1488ddf for instance with vm_state building and task_state spawning. [ 1906.225942] env[63241]: DEBUG nova.compute.manager [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Received event network-changed-139ab667-6231-4030-a733-172ac1488ddf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1906.226054] env[63241]: DEBUG nova.compute.manager [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Refreshing instance network info cache due to event network-changed-139ab667-6231-4030-a733-172ac1488ddf. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1906.226242] env[63241]: DEBUG oslo_concurrency.lockutils [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] Acquiring lock "refresh_cache-26b75825-49c4-4870-957a-a2a76a970880" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1906.226375] env[63241]: DEBUG oslo_concurrency.lockutils [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] Acquired lock "refresh_cache-26b75825-49c4-4870-957a-a2a76a970880" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1906.226585] env[63241]: DEBUG nova.network.neutron [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Refreshing network info cache for port 139ab667-6231-4030-a733-172ac1488ddf {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1906.287169] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821190, 'name': ReconfigVM_Task, 'duration_secs': 0.297005} completed successfully. 
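The inventory data reported a little earlier for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) determines how much the scheduler can place on this node. A short worked sketch, assuming placement's usual capacity formula of (total - reserved) * allocation_ratio; the dict below only copies the fields needed for that arithmetic:

    # Assumed capacity formula: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0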
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.287480] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9/779d2380-be6c-4fdb-8755-10e99f8a6fd9.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1906.288646] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'boot_index': 0, 'encrypted': False, 'guest_format': None, 'disk_bus': None, 'encryption_format': None, 'encryption_options': None, 'encryption_secret_uuid': None, 'device_type': 'disk', 'size': 0, 'image_id': 'e128f8d9-813d-4846-9a6e-b4c4717cd5b4'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'}, 'boot_index': None, 'attachment_id': 'f066b59a-2ef4-49fc-ab25-987f089328aa', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sdb', 'device_type': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=63241) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1906.288870] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1906.289075] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1906.289932] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f28d105-062c-46ab-a513-3c016c4fb80f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.307146] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7575d8b5-d54c-4772-bc64-5b6d7702ff98 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.336629] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a/volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1906.336988] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a04385a3-8662-4af0-810c-f5a1d9dc2359 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.356604] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1906.356604] env[63241]: value = "task-1821193" [ 1906.356604] env[63241]: _type = "Task" [ 1906.356604] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.365699] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821193, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.380998] env[63241]: DEBUG nova.compute.manager [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1906.409626] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1906.410032] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1906.410214] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1906.410405] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1906.410553] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1906.410704] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1906.410918] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1906.411094] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1906.411380] env[63241]: DEBUG nova.virt.hardware [None 
req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1906.411444] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1906.411590] env[63241]: DEBUG nova.virt.hardware [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1906.412578] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51417c95-6d7e-409e-a7db-f08eae8639b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.423098] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7cdce3-4722-41ab-a7c8-e1df47f3dece {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.468319] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821191, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.598796] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.242s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.599270] env[63241]: DEBUG nova.compute.manager [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1906.602166] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.299s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.603564] env[63241]: INFO nova.compute.claims [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1906.614739] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821192, 'name': CreateVM_Task} progress is 99%. 
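The "Acquiring lock / acquired / released" triplets above, around "compute_resources" and the per-instance "refresh_cache-<uuid>" names, are oslo.concurrency's named locks; the waited/held durations they log (e.g. "compute_resources" held 2.242s, waited 6.299s) measure contention between concurrent builds. A minimal sketch of the two usual forms, assuming the standard lockutils API; the function names and bodies are illustrative placeholders, only the lock names are borrowed from the log:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Serialized with every other resource claim in this process; the DEBUG
        # lines record how long the caller waited and how long the lock was held.
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form, as used for the "refresh_cache-<uuid>" locks.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here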
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.638105] env[63241]: DEBUG nova.network.neutron [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Successfully updated port: e59b20e5-cfbf-45bb-beb1-675a18f1cb97 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1906.747668] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.747894] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.748140] env[63241]: INFO nova.compute.manager [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Attaching volume 3db1f9f8-4a70-4324-92e6-c2279747c925 to /dev/sdb [ 1906.781382] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345d0db4-6747-4184-a38d-8332e7a8a64f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.788967] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44557c52-3972-43a2-8c49-fe07fa0dfecb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.803606] env[63241]: DEBUG nova.virt.block_device [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Updating existing volume attachment record: ff014df2-ea75-4013-a92a-95d122ac28f5 {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1906.866213] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821193, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.920209] env[63241]: DEBUG nova.network.neutron [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Updated VIF entry in instance network info cache for port 139ab667-6231-4030-a733-172ac1488ddf. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1906.920576] env[63241]: DEBUG nova.network.neutron [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Updating instance_info_cache with network_info: [{"id": "139ab667-6231-4030-a733-172ac1488ddf", "address": "fa:16:3e:a5:50:94", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap139ab667-62", "ovs_interfaceid": "139ab667-6231-4030-a733-172ac1488ddf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.970797] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821191, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561824} completed successfully. 
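The network_info blob cached in the entry above is a list of VIF dicts, and the fields the driver later turns into VIF info (port id, MAC, fixed IPs, MTU) can be read with plain dict access. A short sketch over a trimmed-down copy of that entry for port 139ab667-6231-4030-a733-172ac1488ddf, keeping only a few of its keys:

    # Trimmed copy of the cached VIF entry shown above.
    vif = {
        "id": "139ab667-6231-4030-a733-172ac1488ddf",
        "address": "fa:16:3e:a5:50:94",
        "type": "ovs",
        "devname": "tap139ab667-62",
        "network": {
            "bridge": "br-int",
            "subnets": [{"cidr": "192.168.128.0/28",
                         "ips": [{"address": "192.168.128.3", "type": "fixed"}]}],
            "meta": {"mtu": 8950},
        },
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
    # 139ab667-6231-4030-a733-172ac1488ddf fa:16:3e:a5:50:94 ['192.168.128.3'] 8950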
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.971092] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c/e4514260-dfcc-45a3-80d5-b5484b0b599c.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1906.971313] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1906.971560] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d67b76a5-de7f-4b0c-bb3c-b604e2da1790 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.982541] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1906.982541] env[63241]: value = "task-1821195" [ 1906.982541] env[63241]: _type = "Task" [ 1906.982541] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.990754] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821195, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.108865] env[63241]: DEBUG nova.compute.utils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1907.110472] env[63241]: DEBUG nova.compute.manager [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1907.110646] env[63241]: DEBUG nova.network.neutron [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1907.125132] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821192, 'name': CreateVM_Task, 'duration_secs': 0.526249} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.125329] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1907.126022] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.126211] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.126532] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1907.127046] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60fb8f80-87d8-4493-a0ab-8fd31118799f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.132063] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1907.132063] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c6f4a8-b33d-4d56-6eb0-0156de15e2d6" [ 1907.132063] env[63241]: _type = "Task" [ 1907.132063] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.140214] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c6f4a8-b33d-4d56-6eb0-0156de15e2d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.141760] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "refresh_cache-77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.141947] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "refresh_cache-77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.142072] env[63241]: DEBUG nova.network.neutron [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1907.155388] env[63241]: DEBUG nova.policy [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c00391826fd242709ad7947610554fc2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '855da29218ba4391a208e2835f60ee11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1907.367590] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821193, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.423702] env[63241]: DEBUG oslo_concurrency.lockutils [req-7f8ed254-ab78-42dd-b5d3-53a80fccc2ff req-826698a3-0d0f-4726-8707-7c4ebf3f3435 service nova] Releasing lock "refresh_cache-26b75825-49c4-4870-957a-a2a76a970880" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.432627] env[63241]: DEBUG nova.network.neutron [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Successfully created port: fbe70abb-a696-4530-893c-079aa3168dc7 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1907.493207] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067142} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.493585] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1907.494302] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13301be-d22c-481f-af4f-9d05ff0c565e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.517374] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c/e4514260-dfcc-45a3-80d5-b5484b0b599c.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1907.517648] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f5f7a0f-6c67-41fb-a0a4-8f4ddafc7432 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.537936] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1907.537936] env[63241]: value = "task-1821196" [ 1907.537936] env[63241]: _type = "Task" [ 1907.537936] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.545591] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821196, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.622210] env[63241]: DEBUG nova.compute.manager [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1907.642519] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c6f4a8-b33d-4d56-6eb0-0156de15e2d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010902} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.642748] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.642929] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1907.643259] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.643339] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.643535] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1907.649353] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac472400-baf1-4ada-abb7-718f590285b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.659331] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1907.660344] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1907.660433] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d728e27a-24eb-49e0-b106-3403014ed1ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.667852] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1907.667852] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5202ce30-ea57-c180-edec-0cd559b6a728" [ 1907.667852] env[63241]: _type = "Task" [ 1907.667852] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.683314] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5202ce30-ea57-c180-edec-0cd559b6a728, 'name': SearchDatastore_Task, 'duration_secs': 0.008155} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.684784] env[63241]: DEBUG nova.network.neutron [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1907.686677] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-715015ed-b457-4808-96a6-9dc3e8f985a2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.694666] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1907.694666] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52855a5e-b694-97a3-7eb0-53379e684532" [ 1907.694666] env[63241]: _type = "Task" [ 1907.694666] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.707916] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52855a5e-b694-97a3-7eb0-53379e684532, 'name': SearchDatastore_Task, 'duration_secs': 0.008953} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.710067] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.710342] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 26b75825-49c4-4870-957a-a2a76a970880/26b75825-49c4-4870-957a-a2a76a970880.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1907.710607] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-adf00c65-0f9b-460a-8559-b610b29955ce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.718866] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1907.718866] env[63241]: value = "task-1821197" [ 1907.718866] env[63241]: _type = "Task" [ 1907.718866] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.728697] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821197, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.792203] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030479a0-4454-4648-b366-b6839c667036 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.800109] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d1afce-646c-4738-8716-a77f97aec531 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.835707] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7e36c3-1102-48b2-898c-5ca03aad8c7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.844508] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090ca374-995d-4c88-acbf-d7ecf4eb358d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.858287] env[63241]: DEBUG nova.compute.provider_tree [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1907.869902] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821193, 'name': ReconfigVM_Task, 'duration_secs': 1.350574} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.870281] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfigured VM instance instance-00000066 to attach disk [datastore1] volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a/volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1907.876021] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4108c91a-acb8-4197-b108-7b3dd239aa69 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.888802] env[63241]: DEBUG nova.network.neutron [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Updating instance_info_cache with network_info: [{"id": "e59b20e5-cfbf-45bb-beb1-675a18f1cb97", "address": "fa:16:3e:70:2b:34", "network": {"id": "93c05514-d892-481d-9774-124866cb4462", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-742911692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f7af6cf881f84203a7f0a546466bf76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape59b20e5-cf", "ovs_interfaceid": "e59b20e5-cfbf-45bb-beb1-675a18f1cb97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.893546] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1907.893546] env[63241]: value = "task-1821198" [ 1907.893546] env[63241]: _type = "Task" [ 1907.893546] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.904150] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821198, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.049223] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821196, 'name': ReconfigVM_Task, 'duration_secs': 0.317526} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.049606] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Reconfigured VM instance instance-0000006d to attach disk [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c/e4514260-dfcc-45a3-80d5-b5484b0b599c.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1908.050650] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0978e63-5ebe-45ea-acf6-d08018523d37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.058643] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1908.058643] env[63241]: value = "task-1821199" [ 1908.058643] env[63241]: _type = "Task" [ 1908.058643] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.068330] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821199, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.230165] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821197, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.253538] env[63241]: DEBUG nova.compute.manager [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Received event network-vif-plugged-e59b20e5-cfbf-45bb-beb1-675a18f1cb97 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1908.253741] env[63241]: DEBUG oslo_concurrency.lockutils [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] Acquiring lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.253958] env[63241]: DEBUG oslo_concurrency.lockutils [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] Lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.254170] env[63241]: DEBUG oslo_concurrency.lockutils [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] Lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.254345] env[63241]: DEBUG nova.compute.manager [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] No waiting events found dispatching network-vif-plugged-e59b20e5-cfbf-45bb-beb1-675a18f1cb97 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1908.254510] env[63241]: WARNING nova.compute.manager [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Received unexpected event network-vif-plugged-e59b20e5-cfbf-45bb-beb1-675a18f1cb97 for instance with vm_state building and task_state spawning. [ 1908.254667] env[63241]: DEBUG nova.compute.manager [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Received event network-changed-e59b20e5-cfbf-45bb-beb1-675a18f1cb97 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1908.254853] env[63241]: DEBUG nova.compute.manager [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Refreshing instance network info cache due to event network-changed-e59b20e5-cfbf-45bb-beb1-675a18f1cb97. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1908.255026] env[63241]: DEBUG oslo_concurrency.lockutils [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] Acquiring lock "refresh_cache-77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.361164] env[63241]: DEBUG nova.scheduler.client.report [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1908.394121] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "refresh_cache-77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.394121] env[63241]: DEBUG nova.compute.manager [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Instance network_info: |[{"id": "e59b20e5-cfbf-45bb-beb1-675a18f1cb97", "address": "fa:16:3e:70:2b:34", "network": {"id": "93c05514-d892-481d-9774-124866cb4462", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-742911692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f7af6cf881f84203a7f0a546466bf76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape59b20e5-cf", "ovs_interfaceid": "e59b20e5-cfbf-45bb-beb1-675a18f1cb97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1908.394121] env[63241]: DEBUG oslo_concurrency.lockutils [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] Acquired lock "refresh_cache-77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.394121] env[63241]: DEBUG nova.network.neutron [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] [instance: 
77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Refreshing network info cache for port e59b20e5-cfbf-45bb-beb1-675a18f1cb97 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1908.394121] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:2b:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9ec24851-7bb6-426b-b28f-f7b246df1713', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e59b20e5-cfbf-45bb-beb1-675a18f1cb97', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1908.401828] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Creating folder: Project (f7af6cf881f84203a7f0a546466bf76f). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1908.402941] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35369bd7-d126-4d4d-85b1-7fce32f18ad1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.413651] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821198, 'name': ReconfigVM_Task, 'duration_secs': 0.33508} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.413928] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1908.414466] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef8c4284-2544-4d4c-95b0-53d2ada18dd2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.417303] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Created folder: Project (f7af6cf881f84203a7f0a546466bf76f) in parent group-v376927. [ 1908.417488] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Creating folder: Instances. Parent ref: group-v377217. 
{{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1908.417902] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9dc30b5-9184-4a6e-8cf9-2a3555c53908 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.421065] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1908.421065] env[63241]: value = "task-1821201" [ 1908.421065] env[63241]: _type = "Task" [ 1908.421065] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.426419] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Created folder: Instances in parent group-v377217. [ 1908.426639] env[63241]: DEBUG oslo.service.loopingcall [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1908.429417] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1908.429659] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821201, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.429841] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbcaef21-9642-4ae1-aa3b-8b8f25af4747 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.447869] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1908.447869] env[63241]: value = "task-1821203" [ 1908.447869] env[63241]: _type = "Task" [ 1908.447869] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.458998] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821203, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.289798] env[63241]: DEBUG nova.network.neutron [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Successfully updated port: fbe70abb-a696-4530-893c-079aa3168dc7 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1909.292275] env[63241]: DEBUG nova.compute.manager [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1909.294949] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.295397] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1909.303468] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.972s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.305006] env[63241]: INFO nova.compute.claims [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1909.319203] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821199, 'name': Rename_Task, 'duration_secs': 1.180797} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.329433] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1909.329716] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821197, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527999} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.330226] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821201, 'name': Rename_Task, 'duration_secs': 0.173025} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.330398] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821203, 'name': CreateVM_Task, 'duration_secs': 0.372477} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.332065] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a7da258-ad80-446f-aee0-94b14a43035f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.332434] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 26b75825-49c4-4870-957a-a2a76a970880/26b75825-49c4-4870-957a-a2a76a970880.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1909.332550] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1909.332769] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1909.332911] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1909.333166] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd3b423f-a256-4581-82e5-973084389dce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.334811] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b63b7ab6-5ebf-42f1-ab93-944e0924fc70 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.337601] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.337601] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.337601] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1909.337601] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-396943bd-cd84-4feb-8f1d-bb18ac0905d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.341086] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1909.341332] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1909.341455] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1909.341633] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1909.341777] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1909.341924] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1909.342236] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1909.342419] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff 
tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1909.342595] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1909.342758] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1909.342930] env[63241]: DEBUG nova.virt.hardware [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1909.345068] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2149180c-9ba0-4ef5-babe-7081b8267939 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.351791] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1909.351791] env[63241]: value = "task-1821205" [ 1909.351791] env[63241]: _type = "Task" [ 1909.351791] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.354544] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1909.354544] env[63241]: value = "task-1821206" [ 1909.354544] env[63241]: _type = "Task" [ 1909.354544] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.355094] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1909.355094] env[63241]: value = "task-1821207" [ 1909.355094] env[63241]: _type = "Task" [ 1909.355094] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.359997] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1909.359997] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52795023-f7a8-2242-e6fa-ffd7b7953ab6" [ 1909.359997] env[63241]: _type = "Task" [ 1909.359997] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.375116] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e400fc20-ae46-478c-b456-859677978c8c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.386403] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821205, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.390084] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821206, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.405293] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821207, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.405659] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52795023-f7a8-2242-e6fa-ffd7b7953ab6, 'name': SearchDatastore_Task, 'duration_secs': 0.018927} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.408988] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.409265] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1909.409536] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.409754] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.409981] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1909.410322] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-592ff757-c5d4-4692-9145-19dfd2d77582 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.420286] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1909.420525] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1909.421399] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b66e92e-09a9-41f9-8b5a-5be6407584cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.427547] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1909.427547] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52cc4852-f52f-476d-4e28-8d069cbf78e9" [ 1909.427547] env[63241]: _type = "Task" [ 1909.427547] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.440480] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52cc4852-f52f-476d-4e28-8d069cbf78e9, 'name': SearchDatastore_Task, 'duration_secs': 0.00826} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.441365] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94c35504-3785-4814-9bc4-836744bb97d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.446978] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1909.446978] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524da08a-4251-febf-8238-08b431d3246a" [ 1909.446978] env[63241]: _type = "Task" [ 1909.446978] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.456904] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524da08a-4251-febf-8238-08b431d3246a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.543024] env[63241]: DEBUG nova.network.neutron [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Updated VIF entry in instance network info cache for port e59b20e5-cfbf-45bb-beb1-675a18f1cb97. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1909.543461] env[63241]: DEBUG nova.network.neutron [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Updating instance_info_cache with network_info: [{"id": "e59b20e5-cfbf-45bb-beb1-675a18f1cb97", "address": "fa:16:3e:70:2b:34", "network": {"id": "93c05514-d892-481d-9774-124866cb4462", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-742911692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f7af6cf881f84203a7f0a546466bf76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape59b20e5-cf", "ovs_interfaceid": "e59b20e5-cfbf-45bb-beb1-675a18f1cb97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.799801] env[63241]: DEBUG nova.compute.utils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1909.801910] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1909.803011] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1909.804868] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-215f658f-2af6-4525-b94c-489ad794e6f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.805015] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-215f658f-2af6-4525-b94c-489ad794e6f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.805152] env[63241]: DEBUG nova.network.neutron [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1909.846996] env[63241]: DEBUG nova.policy [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ed069bdb22e40409ad6e3ea2da9dd8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4642d232d037477ba8813b56e579d84f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1909.863205] env[63241]: DEBUG oslo_vmware.api [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821205, 'name': PowerOnVM_Task, 'duration_secs': 0.462273} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.868692] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1909.868908] env[63241]: INFO nova.compute.manager [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Took 8.00 seconds to spawn the instance on the hypervisor. 
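Annotator's note: the update_instance_cache_with_nw_info entries above dump the full network_info structure as JSON. The fields usually of interest (port id, MAC, device name, fixed IPs) can be pulled out of such a blob with plain stdlib code. This is an illustrative sketch only, not Nova code; the values are a trimmed copy of the cache entry logged for instance 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0.

    # Illustrative only: summarize one VIF entry from a cached network_info blob
    # like the ones logged by update_instance_cache_with_nw_info above.
    import json

    network_info_json = '''
    [{"id": "e59b20e5-cfbf-45bb-beb1-675a18f1cb97",
      "address": "fa:16:3e:70:2b:34",
      "network": {"id": "93c05514-d892-481d-9774-124866cb4462",
                  "bridge": "br-int",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.11", "type": "fixed"}]}]},
      "type": "ovs",
      "devname": "tape59b20e5-cf"}]
    '''

    for vif in json.loads(network_info_json):
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]
                     if ip.get("type") == "fixed"]
        print(f'port={vif["id"]} mac={vif["address"]} '
              f'dev={vif["devname"]} fixed_ips={fixed_ips}')
    # -> port=e59b20e5-... mac=fa:16:3e:70:2b:34 dev=tape59b20e5-cf fixed_ips=['192.168.128.11']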
[ 1909.869111] env[63241]: DEBUG nova.compute.manager [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1909.869839] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab02375-a548-4067-b3ab-f1d8ee7996ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.876892] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821207, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074851} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.880789] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1909.883932] env[63241]: DEBUG oslo_vmware.api [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821206, 'name': PowerOnVM_Task, 'duration_secs': 0.49158} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.884622] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d77a50b-bb29-49a1-988c-3944d61f6229 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.888189] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1909.888410] env[63241]: DEBUG nova.compute.manager [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1909.889325] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864b0fca-f622-4fd5-aa16-414ceae40e30 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.915506] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 26b75825-49c4-4870-957a-a2a76a970880/26b75825-49c4-4870-957a-a2a76a970880.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1909.915816] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d30b7d3f-1d7a-415a-aa37-712efd2dd609 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.936931] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1909.936931] env[63241]: value = "task-1821208" [ 1909.936931] env[63241]: _type = "Task" [ 1909.936931] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.947858] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821208, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.957192] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524da08a-4251-febf-8238-08b431d3246a, 'name': SearchDatastore_Task, 'duration_secs': 0.009494} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.957493] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.957830] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0/77c501b6-9ef7-4ad9-9013-7bf6b773f2e0.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1909.958120] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b9868aa-991f-4763-b37f-28d149666378 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.965024] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1909.965024] env[63241]: value = "task-1821209" [ 1909.965024] env[63241]: _type = "Task" [ 1909.965024] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.974156] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821209, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.046174] env[63241]: DEBUG oslo_concurrency.lockutils [req-0456b743-6e7f-4b2f-8303-e3d4543696ce req-ebfd46f6-0127-4769-9787-e0a8086a9ea6 service nova] Releasing lock "refresh_cache-77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.104416] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Successfully created port: 5695a20c-9e6c-4223-bc78-d5a80286154f {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1910.289167] env[63241]: DEBUG nova.compute.manager [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Received event network-vif-plugged-fbe70abb-a696-4530-893c-079aa3168dc7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1910.289387] env[63241]: DEBUG oslo_concurrency.lockutils [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] Acquiring lock "215f658f-2af6-4525-b94c-489ad794e6f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.289618] env[63241]: DEBUG oslo_concurrency.lockutils [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] Lock "215f658f-2af6-4525-b94c-489ad794e6f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.289793] env[63241]: DEBUG oslo_concurrency.lockutils [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] Lock "215f658f-2af6-4525-b94c-489ad794e6f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.289976] env[63241]: DEBUG nova.compute.manager [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] No waiting events found dispatching network-vif-plugged-fbe70abb-a696-4530-893c-079aa3168dc7 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1910.290159] env[63241]: WARNING nova.compute.manager [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Received unexpected event network-vif-plugged-fbe70abb-a696-4530-893c-079aa3168dc7 for instance with vm_state building and task_state spawning. 
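Annotator's note: the "Extending root virtual disk to 1048576" entries above are in KiB. The m1.nano flavor logged earlier has root_gb=1, and 1 GiB = 1 * 1024 * 1024 = 1,048,576 KiB, which matches the value handed to VirtualDiskManager.ExtendVirtualDisk_Task. Below is a hedged sketch of that call through an oslo.vmware session; dc_ref and the datastore path are placeholders, and the argument names follow the vSphere API rather than quoting Nova's exact helper.

    # Sketch only: grow a root VMDK to the flavor's root_gb, expressed in KiB,
    # via the VirtualDiskManager, as the ExtendVirtualDisk_Task entries above do.
    def extend_root_disk(session, dc_ref, vmdk_path, root_gb):
        new_capacity_kb = root_gb * 1024 * 1024        # 1 GiB -> 1048576 KiB, as logged
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                  disk_mgr,
                                  name=vmdk_path,       # e.g. "[datastore1] <uuid>/<uuid>.vmdk"
                                  datacenter=dc_ref,
                                  newCapacityKb=new_capacity_kb,
                                  eagerZero=False)
        session.wait_for_task(task)                     # produces the progress/duration lines above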
[ 1910.290327] env[63241]: DEBUG nova.compute.manager [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Received event network-changed-fbe70abb-a696-4530-893c-079aa3168dc7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1910.290470] env[63241]: DEBUG nova.compute.manager [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Refreshing instance network info cache due to event network-changed-fbe70abb-a696-4530-893c-079aa3168dc7. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1910.290634] env[63241]: DEBUG oslo_concurrency.lockutils [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] Acquiring lock "refresh_cache-215f658f-2af6-4525-b94c-489ad794e6f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.308990] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1910.366408] env[63241]: DEBUG nova.network.neutron [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1910.405219] env[63241]: INFO nova.compute.manager [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Took 15.79 seconds to build instance. [ 1910.406255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.449021] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821208, 'name': ReconfigVM_Task, 'duration_secs': 0.339249} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.451203] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 26b75825-49c4-4870-957a-a2a76a970880/26b75825-49c4-4870-957a-a2a76a970880.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1910.451829] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce21c702-b2dd-4b61-9d42-0a9de7fd1898 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.465292] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1910.465292] env[63241]: value = "task-1821210" [ 1910.465292] env[63241]: _type = "Task" [ 1910.465292] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.481033] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821210, 'name': Rename_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.482896] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821209, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440686} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.485907] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0/77c501b6-9ef7-4ad9-9013-7bf6b773f2e0.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1910.485907] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1910.486137] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c862adc-f384-461c-9a22-f72032b463f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.494236] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1910.494236] env[63241]: value = "task-1821211" [ 1910.494236] env[63241]: _type = "Task" [ 1910.494236] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.499478] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ad700d-60f7-4480-829f-3e875d08d98f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.507598] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821211, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.510804] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef4c784-804d-4fb4-87e4-ea75484ead2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.546307] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364274c0-e897-4738-8ba0-cfe7b21b7109 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.554034] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90d9949-1224-48e6-9748-19e2a3187bd5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.567633] env[63241]: DEBUG nova.compute.provider_tree [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1910.615814] env[63241]: DEBUG nova.network.neutron [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Updating instance_info_cache with network_info: [{"id": "fbe70abb-a696-4530-893c-079aa3168dc7", "address": "fa:16:3e:b2:b4:05", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe70abb-a6", "ovs_interfaceid": "fbe70abb-a696-4530-893c-079aa3168dc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.907948] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2943d859-464c-4bef-a65c-c35404274b0b tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.303s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.976901] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 
tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821210, 'name': Rename_Task, 'duration_secs': 0.1845} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.977190] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1910.977446] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c3db7da-e1c4-4b12-8c97-e1e4cd303767 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.984464] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1910.984464] env[63241]: value = "task-1821212" [ 1910.984464] env[63241]: _type = "Task" [ 1910.984464] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.991887] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821212, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.002216] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821211, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077486} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.002459] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1911.003212] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becf8d74-6600-49d2-ab0a-bd770c06f4c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.025220] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0/77c501b6-9ef7-4ad9-9013-7bf6b773f2e0.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1911.025220] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61d3805f-2839-4148-9cec-2b4c3b5ec94b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.052954] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1911.052954] env[63241]: value = "task-1821213" [ 1911.052954] env[63241]: _type = "Task" [ 1911.052954] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.063580] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821213, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.070941] env[63241]: DEBUG nova.scheduler.client.report [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1911.118756] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-215f658f-2af6-4525-b94c-489ad794e6f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.119125] env[63241]: DEBUG nova.compute.manager [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Instance network_info: |[{"id": "fbe70abb-a696-4530-893c-079aa3168dc7", "address": "fa:16:3e:b2:b4:05", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe70abb-a6", "ovs_interfaceid": "fbe70abb-a696-4530-893c-079aa3168dc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1911.119497] env[63241]: DEBUG oslo_concurrency.lockutils [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] Acquired lock "refresh_cache-215f658f-2af6-4525-b94c-489ad794e6f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.119621] env[63241]: DEBUG nova.network.neutron [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Refreshing network info cache for port fbe70abb-a696-4530-893c-079aa3168dc7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1911.121251] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None 
req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:b4:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbe70abb-a696-4530-893c-079aa3168dc7', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1911.130607] env[63241]: DEBUG oslo.service.loopingcall [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1911.130607] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1911.130607] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-114ad519-6f9b-4db5-9f19-11c70221900d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.151269] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1911.151269] env[63241]: value = "task-1821214" [ 1911.151269] env[63241]: _type = "Task" [ 1911.151269] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.156813] env[63241]: DEBUG nova.compute.manager [req-62ff0200-03ca-4e46-9847-78c41dd0f29a req-6b9432fe-820b-450b-8aae-bb01910c1776 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received event network-changed-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1911.157015] env[63241]: DEBUG nova.compute.manager [req-62ff0200-03ca-4e46-9847-78c41dd0f29a req-6b9432fe-820b-450b-8aae-bb01910c1776 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Refreshing instance network info cache due to event network-changed-249e56d5-0dc5-4bab-9179-ca69f7024104. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1911.157227] env[63241]: DEBUG oslo_concurrency.lockutils [req-62ff0200-03ca-4e46-9847-78c41dd0f29a req-6b9432fe-820b-450b-8aae-bb01910c1776 service nova] Acquiring lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.157373] env[63241]: DEBUG oslo_concurrency.lockutils [req-62ff0200-03ca-4e46-9847-78c41dd0f29a req-6b9432fe-820b-450b-8aae-bb01910c1776 service nova] Acquired lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.157554] env[63241]: DEBUG nova.network.neutron [req-62ff0200-03ca-4e46-9847-78c41dd0f29a req-6b9432fe-820b-450b-8aae-bb01910c1776 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Refreshing network info cache for port 249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1911.164806] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821214, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.320865] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1911.348849] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1911.349154] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377216', 'volume_id': '3db1f9f8-4a70-4324-92e6-c2279747c925', 'name': 'volume-3db1f9f8-4a70-4324-92e6-c2279747c925', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0ea8cf6-4023-4093-b0bc-67b02604125a', 'attached_at': '', 'detached_at': '', 'volume_id': '3db1f9f8-4a70-4324-92e6-c2279747c925', 'serial': '3db1f9f8-4a70-4324-92e6-c2279747c925'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1911.350218] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5c8f0c-16fe-405e-ab3e-dabb99d2d3a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.355818] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1911.356089] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1911.356282] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1911.356523] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1911.356675] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1911.356840] env[63241]: 
DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1911.357084] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1911.357291] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1911.357472] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1911.357646] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1911.357832] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1911.358735] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b258d6-97b4-483b-87c0-6b37e9198131 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.378346] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f41cf42-5bc3-4520-a06d-faaa03244cbf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.382072] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8085a84f-3458-4a93-9b2d-1638f7f1cf26 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.419219] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-3db1f9f8-4a70-4324-92e6-c2279747c925/volume-3db1f9f8-4a70-4324-92e6-c2279747c925.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1911.419219] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c179e796-5915-4883-8ae1-2d3ccfe6abd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.438412] env[63241]: DEBUG oslo_vmware.api [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1911.438412] env[63241]: value = "task-1821215" [ 1911.438412] env[63241]: _type = "Task" [ 1911.438412] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.447189] env[63241]: DEBUG oslo_vmware.api [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821215, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.494609] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821212, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.563711] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821213, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.577729] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.578275] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Start building networks asynchronously for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1911.580958] env[63241]: DEBUG oslo_concurrency.lockutils [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.395s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.581194] env[63241]: DEBUG nova.objects.instance [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lazy-loading 'resources' on Instance uuid fb5d60fa-fa13-44a1-8291-4645761a0c80 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1911.643429] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Successfully updated port: 5695a20c-9e6c-4223-bc78-d5a80286154f {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1911.665212] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821214, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.855498] env[63241]: DEBUG nova.network.neutron [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Updated VIF entry in instance network info cache for port fbe70abb-a696-4530-893c-079aa3168dc7. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1911.855847] env[63241]: DEBUG nova.network.neutron [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Updating instance_info_cache with network_info: [{"id": "fbe70abb-a696-4530-893c-079aa3168dc7", "address": "fa:16:3e:b2:b4:05", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe70abb-a6", "ovs_interfaceid": "fbe70abb-a696-4530-893c-079aa3168dc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.886137] env[63241]: DEBUG nova.network.neutron [req-62ff0200-03ca-4e46-9847-78c41dd0f29a req-6b9432fe-820b-450b-8aae-bb01910c1776 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updated VIF entry in instance network info cache for port 249e56d5-0dc5-4bab-9179-ca69f7024104. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1911.886528] env[63241]: DEBUG nova.network.neutron [req-62ff0200-03ca-4e46-9847-78c41dd0f29a req-6b9432fe-820b-450b-8aae-bb01910c1776 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating instance_info_cache with network_info: [{"id": "249e56d5-0dc5-4bab-9179-ca69f7024104", "address": "fa:16:3e:85:eb:39", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249e56d5-0d", "ovs_interfaceid": "249e56d5-0dc5-4bab-9179-ca69f7024104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.948439] env[63241]: DEBUG oslo_vmware.api [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821215, 'name': ReconfigVM_Task, 'duration_secs': 0.330651} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.948708] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-3db1f9f8-4a70-4324-92e6-c2279747c925/volume-3db1f9f8-4a70-4324-92e6-c2279747c925.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1911.953642] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d394041c-c88d-479b-a5eb-bf40d3485884 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.968760] env[63241]: DEBUG oslo_vmware.api [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1911.968760] env[63241]: value = "task-1821216" [ 1911.968760] env[63241]: _type = "Task" [ 1911.968760] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.976743] env[63241]: DEBUG oslo_vmware.api [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821216, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.993871] env[63241]: DEBUG oslo_vmware.api [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821212, 'name': PowerOnVM_Task, 'duration_secs': 0.855833} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.994130] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1911.994338] env[63241]: INFO nova.compute.manager [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Took 7.87 seconds to spawn the instance on the hypervisor. [ 1911.994523] env[63241]: DEBUG nova.compute.manager [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1911.995360] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae54453-e360-4345-a989-9e4d1df0c09d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.063717] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821213, 'name': ReconfigVM_Task, 'duration_secs': 0.718274} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.064057] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0/77c501b6-9ef7-4ad9-9013-7bf6b773f2e0.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1912.064787] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1da01a72-39fc-4882-b9f0-3541e4526c3c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.072216] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1912.072216] env[63241]: value = "task-1821217" [ 1912.072216] env[63241]: _type = "Task" [ 1912.072216] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.087544] env[63241]: DEBUG nova.compute.utils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1912.088772] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821217, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.089482] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1912.089482] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1912.131693] env[63241]: DEBUG nova.policy [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ed069bdb22e40409ad6e3ea2da9dd8a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4642d232d037477ba8813b56e579d84f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1912.146063] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "refresh_cache-c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.146202] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "refresh_cache-c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.146350] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1912.164874] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821214, 'name': CreateVM_Task, 'duration_secs': 0.616359} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.165060] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1912.165745] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.165966] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.166358] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1912.166637] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0873a6a9-e825-4e8f-8f9e-240b4b7b030a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.172945] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1912.172945] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5253d055-1e50-51f8-daff-f7ef1439e84e" [ 1912.172945] env[63241]: _type = "Task" [ 1912.172945] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.181274] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5253d055-1e50-51f8-daff-f7ef1439e84e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.232566] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd67d0ef-71b9-4074-b345-774e28e16cb4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.240316] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1738b5fd-ee3f-49cc-a2fd-0ff0bcd0b0cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.270437] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7d9170-51d1-47c2-a6f3-ae27b8fd8020 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.278451] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231dc963-55dc-4d41-9dfc-3e51caead4b3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.292989] env[63241]: DEBUG nova.compute.provider_tree [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1912.359490] env[63241]: DEBUG oslo_concurrency.lockutils [req-40588893-b655-4eae-946f-747dbc2fe276 req-d7260233-03a0-47ae-b6e6-98fe173f6e5e service nova] Releasing lock "refresh_cache-215f658f-2af6-4525-b94c-489ad794e6f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.388972] env[63241]: DEBUG oslo_concurrency.lockutils [req-62ff0200-03ca-4e46-9847-78c41dd0f29a req-6b9432fe-820b-450b-8aae-bb01910c1776 service nova] Releasing lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.438871] env[63241]: DEBUG nova.compute.manager [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Received event network-vif-plugged-5695a20c-9e6c-4223-bc78-d5a80286154f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1912.439174] env[63241]: DEBUG oslo_concurrency.lockutils [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] Acquiring lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.439450] env[63241]: DEBUG oslo_concurrency.lockutils [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] Lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.439543] env[63241]: DEBUG oslo_concurrency.lockutils [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 
service nova] Lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.439727] env[63241]: DEBUG nova.compute.manager [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] No waiting events found dispatching network-vif-plugged-5695a20c-9e6c-4223-bc78-d5a80286154f {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1912.439994] env[63241]: WARNING nova.compute.manager [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Received unexpected event network-vif-plugged-5695a20c-9e6c-4223-bc78-d5a80286154f for instance with vm_state building and task_state spawning. [ 1912.440119] env[63241]: DEBUG nova.compute.manager [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Received event network-changed-5695a20c-9e6c-4223-bc78-d5a80286154f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1912.440279] env[63241]: DEBUG nova.compute.manager [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Refreshing instance network info cache due to event network-changed-5695a20c-9e6c-4223-bc78-d5a80286154f. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1912.440466] env[63241]: DEBUG oslo_concurrency.lockutils [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] Acquiring lock "refresh_cache-c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.481703] env[63241]: DEBUG oslo_vmware.api [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821216, 'name': ReconfigVM_Task, 'duration_secs': 0.138411} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.482345] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377216', 'volume_id': '3db1f9f8-4a70-4324-92e6-c2279747c925', 'name': 'volume-3db1f9f8-4a70-4324-92e6-c2279747c925', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0ea8cf6-4023-4093-b0bc-67b02604125a', 'attached_at': '', 'detached_at': '', 'volume_id': '3db1f9f8-4a70-4324-92e6-c2279747c925', 'serial': '3db1f9f8-4a70-4324-92e6-c2279747c925'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1912.511523] env[63241]: INFO nova.compute.manager [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Took 15.94 seconds to build instance. [ 1912.583444] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821217, 'name': Rename_Task, 'duration_secs': 0.282535} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.583748] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1912.584017] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f2ec1bf-ae80-40cb-868d-cae4fd10cb60 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.590953] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1912.590953] env[63241]: value = "task-1821218" [ 1912.590953] env[63241]: _type = "Task" [ 1912.590953] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.594638] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1912.605009] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821218, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.612312] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Successfully created port: 1a0ff10e-14d3-4b93-b20e-f76c3b7e1207 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1912.684422] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5253d055-1e50-51f8-daff-f7ef1439e84e, 'name': SearchDatastore_Task, 'duration_secs': 0.00885} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.684758] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.685038] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1912.685309] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.685473] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.685660] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1912.685949] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2019e5a5-10c6-49ec-81b7-0212d3b09bd1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.694747] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1912.694925] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1912.695721] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8634d940-891e-4855-85c9-430e7bbb83df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.701386] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1912.701386] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5228d55f-c7ba-4b0e-4e2e-97be1285ea69" [ 1912.701386] env[63241]: _type = "Task" [ 1912.701386] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.710953] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5228d55f-c7ba-4b0e-4e2e-97be1285ea69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.734458] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1912.799527] env[63241]: DEBUG nova.scheduler.client.report [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1912.994275] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Updating instance_info_cache with network_info: [{"id": "5695a20c-9e6c-4223-bc78-d5a80286154f", "address": "fa:16:3e:25:02:c0", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5695a20c-9e", "ovs_interfaceid": "5695a20c-9e6c-4223-bc78-d5a80286154f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.014556] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f2207bd1-a0b2-495f-b47e-e0d4ff0b29fd tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.454s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.105707] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821218, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.213192] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5228d55f-c7ba-4b0e-4e2e-97be1285ea69, 'name': SearchDatastore_Task, 'duration_secs': 0.01052} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.213339] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91a0ea9f-a3af-4d14-9971-866f45fffb60 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.218512] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1913.218512] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]521cc50f-3b43-fce6-c46a-46f8051b175d" [ 1913.218512] env[63241]: _type = "Task" [ 1913.218512] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.226588] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521cc50f-3b43-fce6-c46a-46f8051b175d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.313399] env[63241]: DEBUG oslo_concurrency.lockutils [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.732s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.315828] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.910s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.316033] env[63241]: DEBUG nova.objects.instance [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1913.338496] env[63241]: INFO nova.scheduler.client.report [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Deleted allocations for instance fb5d60fa-fa13-44a1-8291-4645761a0c80 [ 1913.497217] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 
tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "refresh_cache-c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.497494] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Instance network_info: |[{"id": "5695a20c-9e6c-4223-bc78-d5a80286154f", "address": "fa:16:3e:25:02:c0", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5695a20c-9e", "ovs_interfaceid": "5695a20c-9e6c-4223-bc78-d5a80286154f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1913.497796] env[63241]: DEBUG oslo_concurrency.lockutils [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] Acquired lock "refresh_cache-c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.497982] env[63241]: DEBUG nova.network.neutron [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Refreshing network info cache for port 5695a20c-9e6c-4223-bc78-d5a80286154f {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1913.499156] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:02:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e885ebd4-93ca-4e9e-8889-0f16bd91e61e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5695a20c-9e6c-4223-bc78-d5a80286154f', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1913.507022] env[63241]: DEBUG oslo.service.loopingcall [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
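The "Instance VIF info" entry above is derived field-by-field from the neutron network_info logged just before it: the bridge becomes network_name, the port MAC becomes mac_address, and the NSX logical-switch id becomes the opaque network reference. A sketch of that mapping with the key names taken from the logged structures (the helper itself is illustrative, not the vmops code):

```python
# Illustrative mapping from one network_info VIF (as logged above) to the
# "Instance VIF info" structure; key names mirror the log output.
def vif_info_from_network_info(vif: dict, vif_model: str = "vmxnet3") -> dict:
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],          # "br-int"
        "mac_address": vif["address"],                     # "fa:16:3e:25:02:c0"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                             # neutron port id
        "vif_model": vif_model,
    }
```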
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1913.508214] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1913.508505] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2d0ac47-dd35-4a6b-8221-7ed3c92671ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.524125] env[63241]: DEBUG nova.objects.instance [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid c0ea8cf6-4023-4093-b0bc-67b02604125a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.530823] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1913.530823] env[63241]: value = "task-1821219" [ 1913.530823] env[63241]: _type = "Task" [ 1913.530823] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.540081] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821219, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.603636] env[63241]: DEBUG oslo_vmware.api [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821218, 'name': PowerOnVM_Task, 'duration_secs': 0.893311} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.604584] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1913.604896] env[63241]: INFO nova.compute.manager [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Took 7.22 seconds to spawn the instance on the hypervisor. [ 1913.605150] env[63241]: DEBUG nova.compute.manager [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1913.606509] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd27f755-c6f8-478f-985f-1267929e8270 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.610355] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1913.645223] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1913.645683] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1913.645683] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1913.645817] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1913.645958] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1913.646214] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1913.646512] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1913.646723] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1913.646908] env[63241]: DEBUG 
nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1913.647114] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1913.647362] env[63241]: DEBUG nova.virt.hardware [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1913.648260] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb03740a-e52e-4558-848f-5a94d42803dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.656914] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215c81e9-6f20-4fd4-ac2b-0a47ec3f055e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.728205] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]521cc50f-3b43-fce6-c46a-46f8051b175d, 'name': SearchDatastore_Task, 'duration_secs': 0.009604} completed successfully. 
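The hardware lines above enumerate every sockets*cores*threads split of the flavor's single vCPU under the default 65536 limits, which is why exactly one topology, (1,1,1), comes back. A brute-force sketch of that enumeration (the real nova.virt.hardware logic applies more constraints; this is illustrative only):

```python
# Illustrative enumeration of sockets*cores*threads factorisations of a
# vCPU count, in the spirit of the "possible topologies" lines above.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology logged
```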
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.728460] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.728714] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7/215f658f-2af6-4525-b94c-489ad794e6f7.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1913.728961] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4abc2acb-1378-4e11-9770-70a3ca785f50 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.735513] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1913.735513] env[63241]: value = "task-1821220" [ 1913.735513] env[63241]: _type = "Task" [ 1913.735513] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.742878] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821220, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.851265] env[63241]: DEBUG oslo_concurrency.lockutils [None req-21ed55f3-1f04-4953-a3e2-b954bbb22b1e tempest-ServerRescueNegativeTestJSON-861011626 tempest-ServerRescueNegativeTestJSON-861011626-project-member] Lock "fb5d60fa-fa13-44a1-8291-4645761a0c80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.869s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.031122] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c7faced0-6b11-42ed-9f78-e4bca953a02b tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.283s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.045831] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821219, 'name': CreateVM_Task} progress is 25%. 
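The copy above goes from the shared devstack-image-cache_base folder to a per-instance folder on the same datastore, so later instances booted from image e128f8d9-... reuse the cached VMDK instead of re-downloading it. A sketch of how the "[datastore1] folder/file.vmdk" paths compose (build_path is an illustrative helper, not the ds_util API):

```python
# Illustrative composition of the "[datastore] folder/file" paths used in
# the CopyVirtualDisk_Task above; build_path is an assumption, not ds_util.
def build_path(datastore: str, *parts: str) -> str:
    return f"[{datastore}] " + "/".join(parts)

image_id = "e128f8d9-813d-4846-9a6e-b4c4717cd5b4"
instance_uuid = "215f658f-2af6-4525-b94c-489ad794e6f7"

src = build_path("datastore1", "devstack-image-cache_base", image_id, f"{image_id}.vmdk")
dst = build_path("datastore1", instance_uuid, f"{instance_uuid}.vmdk")
print(src)   # [datastore1] devstack-image-cache_base/<image>/<image>.vmdk
print(dst)   # [datastore1] <instance>/<instance>.vmdk
```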
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.132591] env[63241]: INFO nova.compute.manager [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Took 16.48 seconds to build instance. [ 1914.154849] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Successfully updated port: 1a0ff10e-14d3-4b93-b20e-f76c3b7e1207 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1914.245915] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821220, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506075} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.246187] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7/215f658f-2af6-4525-b94c-489ad794e6f7.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1914.246459] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1914.246704] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3928b79-3df0-4454-92da-9c96a4fa3e65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.256122] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1914.256122] env[63241]: value = "task-1821221" [ 1914.256122] env[63241]: _type = "Task" [ 1914.256122] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.264319] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821221, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.322561] env[63241]: DEBUG nova.network.neutron [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Updated VIF entry in instance network info cache for port 5695a20c-9e6c-4223-bc78-d5a80286154f. 
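The "1048576" in the extend call above appears to be the m1.nano flavor's root_gb=1 (logged earlier in this section) expressed in KiB, the unit the virtual-disk extend is driven with here. A quick illustrative check of that arithmetic:

```python
# Illustrative check: a 1 GiB root disk (root_gb=1 in the flavor logged
# earlier) expressed in KiB matches "Extending root virtual disk to 1048576".
root_gb = 1
root_kib = root_gb * 1024 * 1024
assert root_kib == 1048576
```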
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1914.323043] env[63241]: DEBUG nova.network.neutron [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Updating instance_info_cache with network_info: [{"id": "5695a20c-9e6c-4223-bc78-d5a80286154f", "address": "fa:16:3e:25:02:c0", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5695a20c-9e", "ovs_interfaceid": "5695a20c-9e6c-4223-bc78-d5a80286154f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.326036] env[63241]: DEBUG oslo_concurrency.lockutils [None req-313ad694-b0c2-4a9f-ad74-4b6653259d8e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.476681] env[63241]: DEBUG nova.compute.manager [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Received event network-changed-139ab667-6231-4030-a733-172ac1488ddf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1914.477048] env[63241]: DEBUG nova.compute.manager [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Refreshing instance network info cache due to event network-changed-139ab667-6231-4030-a733-172ac1488ddf. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1914.477407] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] Acquiring lock "refresh_cache-26b75825-49c4-4870-957a-a2a76a970880" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.477682] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] Acquired lock "refresh_cache-26b75825-49c4-4870-957a-a2a76a970880" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.477976] env[63241]: DEBUG nova.network.neutron [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Refreshing network info cache for port 139ab667-6231-4030-a733-172ac1488ddf {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1914.507954] env[63241]: INFO nova.compute.manager [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Rescuing [ 1914.508260] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "refresh_cache-77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.508427] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "refresh_cache-77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.508582] env[63241]: DEBUG nova.network.neutron [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1914.543405] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821219, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.634431] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3bdfbd26-a132-4abd-8c30-29a8c4e757e5 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.995s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.659109] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "refresh_cache-eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.659109] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "refresh_cache-eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.659109] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1914.770284] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06891} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.770728] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1914.771977] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c194adfa-4f42-40c0-85fc-e7902a75876c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.794837] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7/215f658f-2af6-4525-b94c-489ad794e6f7.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1914.795128] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a64259a2-955b-4890-84c4-80c5f1bd62f4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.815506] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1914.815506] env[63241]: value = "task-1821222" [ 1914.815506] env[63241]: _type = "Task" [ 1914.815506] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.825829] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821222, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.825829] env[63241]: DEBUG oslo_concurrency.lockutils [req-ed7bac69-9148-46a0-966d-8474fbddbb17 req-fd4805e0-9e51-445d-913c-c0656b1142e6 service nova] Releasing lock "refresh_cache-c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.949782] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.950234] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.052206] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821219, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.224378] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Instance cache missing network info. 
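The Acquiring/acquired/released lines above, with their "waited"/"held" timings, follow a named-lock pattern: one lock object per name, with the wait and hold durations measured around acquire and release. A stdlib-only sketch of that bookkeeping (illustrative; the service itself uses oslo_concurrency.lockutils, whose internals differ):

```python
# Illustrative stdlib-only model of the named-lock bookkeeping above; the
# real code uses oslo_concurrency.lockutils.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def named_lock(name):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')

with named_lock("compute_resources"):
    time.sleep(0.01)   # stand-in for updating resource usage
```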
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1915.247881] env[63241]: DEBUG nova.network.neutron [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Updating instance_info_cache with network_info: [{"id": "e59b20e5-cfbf-45bb-beb1-675a18f1cb97", "address": "fa:16:3e:70:2b:34", "network": {"id": "93c05514-d892-481d-9774-124866cb4462", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-742911692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f7af6cf881f84203a7f0a546466bf76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape59b20e5-cf", "ovs_interfaceid": "e59b20e5-cfbf-45bb-beb1-675a18f1cb97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1915.333802] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821222, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.453504] env[63241]: DEBUG nova.compute.utils [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1915.533518] env[63241]: DEBUG nova.network.neutron [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Updated VIF entry in instance network info cache for port 139ab667-6231-4030-a733-172ac1488ddf. 
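The "Using /dev/sd instead of None" line above records the device-name prefix settled on before picking the next free block-device name for the volume being reserved. A sketch of that selection step (the helper is illustrative, not the nova.compute.utils implementation):

```python
# Illustrative next-free-device-name pick once a "/dev/sd" prefix has been
# chosen, as the get_next_device_name line above implies; not the Nova helper.
import string

def next_device_name(used, prefix="/dev/sd"):
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError("no free device names left")

print(next_device_name({"/dev/sda"}))   # -> /dev/sdb
```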
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1915.533947] env[63241]: DEBUG nova.network.neutron [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Updating instance_info_cache with network_info: [{"id": "139ab667-6231-4030-a733-172ac1488ddf", "address": "fa:16:3e:a5:50:94", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap139ab667-62", "ovs_interfaceid": "139ab667-6231-4030-a733-172ac1488ddf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1915.544352] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821219, 'name': CreateVM_Task, 'duration_secs': 1.724837} completed successfully. 
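The refreshed cache entry above is the one in this section that carries a floating IP (10.180.180.214) alongside its fixed address. A sketch of walking a network_info entry for both kinds of address, with key names taken from the logged structure (the helper itself is illustrative):

```python
# Illustrative walk over a network_info VIF (structure as logged above) to
# collect fixed and floating addresses; the helper is an assumption.
def addresses(vif: dict) -> dict:
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return {"fixed": fixed, "floating": floating}

# For the 26b75825 entry above this yields
# {'fixed': ['192.168.128.3'], 'floating': ['10.180.180.214']}.
```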
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.544514] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1915.545162] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.545327] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.545643] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1915.546845] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22944628-8969-47be-814c-31ab4fba734e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.552247] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1915.552247] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524d5d23-d2a0-573d-7fc3-055200fa685a" [ 1915.552247] env[63241]: _type = "Task" [ 1915.552247] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.560362] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524d5d23-d2a0-573d-7fc3-055200fa685a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.586542] env[63241]: DEBUG nova.network.neutron [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Updating instance_info_cache with network_info: [{"id": "1a0ff10e-14d3-4b93-b20e-f76c3b7e1207", "address": "fa:16:3e:a9:98:57", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a0ff10e-14", "ovs_interfaceid": "1a0ff10e-14d3-4b93-b20e-f76c3b7e1207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1915.751183] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "refresh_cache-77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.830300] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821222, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.956378] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.037448] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] Releasing lock "refresh_cache-26b75825-49c4-4870-957a-a2a76a970880" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.037771] env[63241]: DEBUG nova.compute.manager [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Received event network-vif-plugged-1a0ff10e-14d3-4b93-b20e-f76c3b7e1207 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1916.038011] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] Acquiring lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.038276] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] Lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.038490] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] Lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.038696] env[63241]: DEBUG nova.compute.manager [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] No waiting events found dispatching network-vif-plugged-1a0ff10e-14d3-4b93-b20e-f76c3b7e1207 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1916.038919] env[63241]: WARNING nova.compute.manager [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Received unexpected event network-vif-plugged-1a0ff10e-14d3-4b93-b20e-f76c3b7e1207 for instance with vm_state building and task_state spawning. 
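The "No waiting events found" and "Received unexpected event" lines above come from a wait/pop pattern: a waiter is registered per (instance, event) before the operation that should trigger it, and an incoming Neutron event either wakes that waiter or, if none was registered, is logged as unexpected. A simplified illustrative model (the real InstanceEvents handling in nova.compute.manager is more involved):

```python
# Simplified illustrative model of the per-instance event wait/pop pattern
# behind the log lines above; not the nova.compute.manager implementation.
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}                 # (instance_uuid, event_name) -> Event
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter                      # caller blocks on waiter.wait()

    def pop_event(self, instance_uuid, event_name):
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            return None                    # -> "Received unexpected event ..."
        waiter.set()                       # wake whoever was waiting
        return waiter
```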
[ 1916.039166] env[63241]: DEBUG nova.compute.manager [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Received event network-changed-1a0ff10e-14d3-4b93-b20e-f76c3b7e1207 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1916.039433] env[63241]: DEBUG nova.compute.manager [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Refreshing instance network info cache due to event network-changed-1a0ff10e-14d3-4b93-b20e-f76c3b7e1207. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1916.039616] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] Acquiring lock "refresh_cache-eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.063471] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524d5d23-d2a0-573d-7fc3-055200fa685a, 'name': SearchDatastore_Task, 'duration_secs': 0.010412} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.063742] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.065161] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1916.065161] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.065161] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.065161] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1916.065161] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b77b528-2afd-4052-bd7f-d9a889f7af49 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.073046] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1916.073218] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1916.073995] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0075556e-af02-4373-aae3-a97716acd45c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.079355] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1916.079355] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525bbd13-36a4-df8b-ba95-5d68bc9b6649" [ 1916.079355] env[63241]: _type = "Task" [ 1916.079355] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.087865] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525bbd13-36a4-df8b-ba95-5d68bc9b6649, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.089100] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "refresh_cache-eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.089380] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Instance network_info: |[{"id": "1a0ff10e-14d3-4b93-b20e-f76c3b7e1207", "address": "fa:16:3e:a9:98:57", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a0ff10e-14", "ovs_interfaceid": "1a0ff10e-14d3-4b93-b20e-f76c3b7e1207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1916.089658] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] Acquired lock "refresh_cache-eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.089836] env[63241]: DEBUG nova.network.neutron [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Refreshing network info cache for port 1a0ff10e-14d3-4b93-b20e-f76c3b7e1207 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1916.091031] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:98:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e885ebd4-93ca-4e9e-8889-0f16bd91e61e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a0ff10e-14d3-4b93-b20e-f76c3b7e1207', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1916.099496] env[63241]: DEBUG oslo.service.loopingcall [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 
tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1916.099976] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1916.100338] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5edba298-f49d-4775-a154-99576bb2c0c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.121505] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1916.121505] env[63241]: value = "task-1821223" [ 1916.121505] env[63241]: _type = "Task" [ 1916.121505] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.129963] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.285445] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1916.285729] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1e3bb02-c680-4cf7-9a90-96ee167f18cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.293335] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1916.293335] env[63241]: value = "task-1821224" [ 1916.293335] env[63241]: _type = "Task" [ 1916.293335] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.302955] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.330320] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821222, 'name': ReconfigVM_Task, 'duration_secs': 1.232008} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.330618] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7/215f658f-2af6-4525-b94c-489ad794e6f7.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1916.331281] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09099aab-0df9-41d0-9b87-443d56c4a8a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.338320] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1916.338320] env[63241]: value = "task-1821225" [ 1916.338320] env[63241]: _type = "Task" [ 1916.338320] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.346806] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821225, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.591051] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525bbd13-36a4-df8b-ba95-5d68bc9b6649, 'name': SearchDatastore_Task, 'duration_secs': 0.008809} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.591690] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25e81a47-f800-4d36-95df-008a40a14e36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.596959] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1916.596959] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a8f166-6230-24b4-fd85-653d4eadc688" [ 1916.596959] env[63241]: _type = "Task" [ 1916.596959] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.607261] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a8f166-6230-24b4-fd85-653d4eadc688, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.607510] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.607763] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a/c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1916.608017] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19183a33-11b2-4c89-bf0d-3d3aab5ebe48 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.614693] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1916.614693] env[63241]: value = "task-1821226" [ 1916.614693] env[63241]: _type = "Task" [ 1916.614693] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.622961] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821226, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.631010] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.804233] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.852422] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821225, 'name': Rename_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.944265] env[63241]: DEBUG nova.network.neutron [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Updated VIF entry in instance network info cache for port 1a0ff10e-14d3-4b93-b20e-f76c3b7e1207. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1916.944691] env[63241]: DEBUG nova.network.neutron [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Updating instance_info_cache with network_info: [{"id": "1a0ff10e-14d3-4b93-b20e-f76c3b7e1207", "address": "fa:16:3e:a9:98:57", "network": {"id": "79a57807-64a6-47dd-be67-52202730f6e8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1001588857-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4642d232d037477ba8813b56e579d84f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a0ff10e-14", "ovs_interfaceid": "1a0ff10e-14d3-4b93-b20e-f76c3b7e1207", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.027747] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.028293] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.028636] env[63241]: INFO nova.compute.manager [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Attaching volume 3db4b146-c795-4d6d-8983-4d7672e1b19d to /dev/sdc [ 1917.071699] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6fd4e2-e363-4178-a8c8-5b5f87e131ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.079761] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cf1a87-ef54-4d37-a957-2ed53a78c13d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.093310] env[63241]: DEBUG nova.virt.block_device [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] 
[instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Updating existing volume attachment record: 6d6e7bc8-d22a-4d7f-a550-0b54c14fe00d {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1917.126989] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821226, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458339} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.130300] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a/c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1917.130542] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1917.130857] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-160ecf5b-414a-4ce9-942f-a3c999c7c2df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.138090] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.139370] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1917.139370] env[63241]: value = "task-1821227" [ 1917.139370] env[63241]: _type = "Task" [ 1917.139370] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.146841] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821227, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.307175] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821224, 'name': PowerOffVM_Task, 'duration_secs': 0.923723} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.307175] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1917.307663] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7015a350-ecab-47bf-95e4-4457ab5ffeec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.326979] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c92099-5c2c-496a-aa74-b6bbb596c8b3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.350950] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821225, 'name': Rename_Task, 'duration_secs': 0.835527} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.353255] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1917.353727] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84d181e9-581e-40d9-8c32-b23d012dc88a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.360113] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1917.360419] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-945c32bd-6385-4fb9-b567-117f72bdccef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.363494] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1917.363494] env[63241]: value = "task-1821229" [ 1917.363494] env[63241]: _type = "Task" [ 1917.363494] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.368592] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1917.368592] env[63241]: value = "task-1821230" [ 1917.368592] env[63241]: _type = "Task" [ 1917.368592] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.379392] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821229, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.382954] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1917.383210] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1917.383505] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.383668] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.383851] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1917.384100] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3be96cfe-9c1c-488f-b2e8-518d74c49ddc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.392404] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1917.392627] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1917.393438] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcc80ae2-9607-49f6-b5c6-3882101f66a5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.398892] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1917.398892] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52dfb39b-bde3-bc58-7eb6-c67a36ec8202" [ 1917.398892] env[63241]: _type = "Task" [ 1917.398892] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.406907] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dfb39b-bde3-bc58-7eb6-c67a36ec8202, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.447825] env[63241]: DEBUG oslo_concurrency.lockutils [req-ce4ad53e-d21f-4089-a8da-d68e6ea3f3f6 req-bdbd375c-5aa1-4382-9e9d-236ec3e63724 service nova] Releasing lock "refresh_cache-eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.640129] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.648257] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821227, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067171} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.648535] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1917.649435] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64cf46b-6972-4484-8741-511a54f000a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.671409] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a/c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1917.671761] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18421f1e-5a2c-4473-bf27-4cbf9a796298 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.691800] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1917.691800] env[63241]: value = "task-1821231" [ 1917.691800] env[63241]: _type = "Task" [ 1917.691800] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.700885] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821231, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.873385] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821229, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.909035] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52dfb39b-bde3-bc58-7eb6-c67a36ec8202, 'name': SearchDatastore_Task, 'duration_secs': 0.008056} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.910038] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22c5be8f-010f-4014-a449-5a3edb26a6dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.915519] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1917.915519] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522c79e5-dca6-5006-b015-9678a957380c" [ 1917.915519] env[63241]: _type = "Task" [ 1917.915519] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.924929] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522c79e5-dca6-5006-b015-9678a957380c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.139250] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.201846] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821231, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.374306] env[63241]: DEBUG oslo_vmware.api [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821229, 'name': PowerOnVM_Task, 'duration_secs': 0.820637} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.374493] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1918.374705] env[63241]: INFO nova.compute.manager [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Took 9.08 seconds to spawn the instance on the hypervisor. 
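Every operation in the records above follows the same oslo.vmware pattern: the driver invokes an asynchronous vSphere task (CreateVM_Task, PowerOnVM_Task, CopyVirtualDisk_Task, Rename_Task, ...), gets back a task handle, and wait_for_task polls it until the log reports "completed successfully". The following is only a minimal sketch of that invoke-then-poll flow using oslo.vmware's public session helpers, not the Nova driver's actual code; the vCenter host, credentials, and the 'vm-12345' reference value are placeholders, not taken from this log.

```python
# Minimal sketch of the invoke-then-poll pattern, not the Nova driver's code.
# Host, credentials and the 'vm-12345' moref value below are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test',             # placeholder vCenter host
    'administrator@example.test',  # placeholder user
    'secret',                      # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)        # matches the ~0.5 s poll spacing in the log

# Build a managed-object reference for an existing VM (placeholder value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Kick off the asynchronous vSphere task, then block until it reaches a
# terminal state; wait_for_task() polls the task and raises on error states.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task_ref)
print(task_info.state)             # 'success' once vCenter finishes the task
```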
[ 1918.374885] env[63241]: DEBUG nova.compute.manager [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1918.375743] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61cd8a3-97fb-4303-a95a-06c74a44f47d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.425791] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522c79e5-dca6-5006-b015-9678a957380c, 'name': SearchDatastore_Task, 'duration_secs': 0.009163} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.426110] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.426375] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. {{(pid=63241) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1918.426637] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8eb63ae-6c91-4d12-a606-65f127953f4a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.433886] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1918.433886] env[63241]: value = "task-1821232" [ 1918.433886] env[63241]: _type = "Task" [ 1918.433886] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.441677] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821232, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.640824] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.703092] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821231, 'name': ReconfigVM_Task, 'duration_secs': 0.709044} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.703453] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Reconfigured VM instance instance-00000071 to attach disk [datastore1] c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a/c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1918.704213] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa60015c-7854-4967-9df5-6a49bce70186 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.711445] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1918.711445] env[63241]: value = "task-1821233" [ 1918.711445] env[63241]: _type = "Task" [ 1918.711445] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.720457] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821233, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.893205] env[63241]: INFO nova.compute.manager [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Took 19.11 seconds to build instance. [ 1918.944395] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821232, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.395597} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.944719] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. 
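The rescue-disk copy above runs entirely between the 'Acquired lock "[datastore1] devstack-image-cache_base/....vmdk"' and 'Releasing lock' records, so concurrent builds cannot race on the cached image file. Below is a small sketch of that serialize-around-the-cached-image idea using oslo.concurrency's lock context manager; it is not Nova's code path, the lock name merely mirrors the log, and copy_disk() is a hypothetical stand-in for the CopyVirtualDisk_Task invocation.

```python
# Sketch of the serialize-around-the-cached-image pattern, not Nova's code.
# The lock name mirrors the log; copy_disk() is a hypothetical stand-in for
# the CopyVirtualDisk_Task call.
from oslo_concurrency import lockutils

CACHED_VMDK = ('[datastore1] devstack-image-cache_base/'
               'e128f8d9-813d-4846-9a6e-b4c4717cd5b4/'
               'e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk')
RESCUE_VMDK = ('[datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0/'
               'e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk')


def copy_disk(source, dest):
    """Hypothetical placeholder for the CopyVirtualDisk_Task call."""
    print('copying %s -> %s' % (source, dest))


# The context manager serializes its body under the named lock; oslo.concurrency
# logs the acquire and release at DEBUG, which is what appears to produce the
# 'Acquiring'/'Acquired'/'Releasing lock "[datastore1] ..."' lines above.
with lockutils.lock(CACHED_VMDK):
    copy_disk(CACHED_VMDK, RESCUE_VMDK)
```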
[ 1918.945515] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b852ea6-ca2a-4a5f-af48-e7de4a97dd9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.972960] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1918.974493] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-696f5dbd-0829-474f-ad7d-01f8efb766bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.990204] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.990454] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.996711] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1918.996711] env[63241]: value = "task-1821234" [ 1918.996711] env[63241]: _type = "Task" [ 1918.996711] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.012552] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821234, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.139826] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.223530] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821233, 'name': Rename_Task, 'duration_secs': 0.230963} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.223991] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1919.224332] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d96f803-71f0-4692-8de6-ed17370d811c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.230712] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1919.230712] env[63241]: value = "task-1821235" [ 1919.230712] env[63241]: _type = "Task" [ 1919.230712] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.238440] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821235, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.395302] env[63241]: DEBUG oslo_concurrency.lockutils [None req-fb886d37-4bd8-43d3-ba53-8474eeb27bff tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "215f658f-2af6-4525-b94c-489ad794e6f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.625s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.498025] env[63241]: DEBUG nova.compute.manager [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1919.513550] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.641663] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.737533] env[63241]: DEBUG nova.compute.manager [req-7df80958-de05-4b44-98b3-d7076b4dc0a9 req-709e99fc-cad3-4133-a42d-74243f0253fb service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Received event network-changed-fbe70abb-a696-4530-893c-079aa3168dc7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1919.737660] env[63241]: DEBUG nova.compute.manager [req-7df80958-de05-4b44-98b3-d7076b4dc0a9 req-709e99fc-cad3-4133-a42d-74243f0253fb service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Refreshing instance network info cache due to event network-changed-fbe70abb-a696-4530-893c-079aa3168dc7. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1919.737859] env[63241]: DEBUG oslo_concurrency.lockutils [req-7df80958-de05-4b44-98b3-d7076b4dc0a9 req-709e99fc-cad3-4133-a42d-74243f0253fb service nova] Acquiring lock "refresh_cache-215f658f-2af6-4525-b94c-489ad794e6f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.738016] env[63241]: DEBUG oslo_concurrency.lockutils [req-7df80958-de05-4b44-98b3-d7076b4dc0a9 req-709e99fc-cad3-4133-a42d-74243f0253fb service nova] Acquired lock "refresh_cache-215f658f-2af6-4525-b94c-489ad794e6f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.738313] env[63241]: DEBUG nova.network.neutron [req-7df80958-de05-4b44-98b3-d7076b4dc0a9 req-709e99fc-cad3-4133-a42d-74243f0253fb service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Refreshing network info cache for port fbe70abb-a696-4530-893c-079aa3168dc7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1919.745199] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821235, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.013850] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821234, 'name': ReconfigVM_Task, 'duration_secs': 0.596328} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.014381] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1920.015252] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d13095-a9d4-4bfb-9459-b7f12940e6f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.044047] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.044308] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.045943] env[63241]: INFO nova.compute.claims [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1920.048543] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6eb21fb-cfc1-4de2-a85a-682fb4a6177d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.065386] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1920.065386] env[63241]: value = "task-1821237" [ 1920.065386] env[63241]: _type = "Task" [ 1920.065386] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.073664] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821237, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.144570] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.243459] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821235, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.458072] env[63241]: DEBUG nova.network.neutron [req-7df80958-de05-4b44-98b3-d7076b4dc0a9 req-709e99fc-cad3-4133-a42d-74243f0253fb service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Updated VIF entry in instance network info cache for port fbe70abb-a696-4530-893c-079aa3168dc7. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1920.458466] env[63241]: DEBUG nova.network.neutron [req-7df80958-de05-4b44-98b3-d7076b4dc0a9 req-709e99fc-cad3-4133-a42d-74243f0253fb service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Updating instance_info_cache with network_info: [{"id": "fbe70abb-a696-4530-893c-079aa3168dc7", "address": "fa:16:3e:b2:b4:05", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe70abb-a6", "ovs_interfaceid": "fbe70abb-a696-4530-893c-079aa3168dc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.576822] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821237, 'name': ReconfigVM_Task, 'duration_secs': 0.501437} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.576822] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1920.577135] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75ab0244-1b1a-4aad-9e86-2d72e61bfb33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.583538] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1920.583538] env[63241]: value = "task-1821238" [ 1920.583538] env[63241]: _type = "Task" [ 1920.583538] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.595806] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821238, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.641275] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.742902] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821235, 'name': PowerOnVM_Task, 'duration_secs': 1.398281} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.743295] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1920.743464] env[63241]: INFO nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Took 9.42 seconds to spawn the instance on the hypervisor. 
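Each 'Updating instance_info_cache with network_info' record above dumps the full VIF structure that Neutron hands back after a network-changed event. As a reading aid only, the sketch below pulls the commonly inspected fields (device name, MAC, fixed and floating IPs, MTU, segmentation ID) out of an abridged copy of the fbe70abb-a696-4530-893c-079aa3168dc7 entry shown in the log; all values come straight from that record.

```python
# Reading aid only: the dict is abridged from the VIF entry for port
# fbe70abb-a696-4530-893c-079aa3168dc7 dumped in the cache-refresh record above.
vif = {
    "id": "fbe70abb-a696-4530-893c-079aa3168dc7",
    "address": "fa:16:3e:b2:b4:05",
    "devname": "tapfbe70abb-a6",
    "details": {"segmentation_id": 187},
    "network": {
        "label": "tempest-ServerActionsTestJSON-341580819-network",
        "meta": {"mtu": 8950},
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.7",
                "type": "fixed",
                "floating_ips": [
                    {"address": "10.180.180.156", "type": "floating"},
                ],
            }],
        }],
    },
}

# Flatten the nested subnets/ips lists into the addresses operators usually
# look for when reading these cache-update records.
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
floating_ips = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip["floating_ips"]]

print(vif["devname"], vif["address"], fixed_ips, floating_ips,
      vif["network"]["meta"]["mtu"], vif["details"]["segmentation_id"])
# tapfbe70abb-a6 fa:16:3e:b2:b4:05 ['192.168.128.7'] ['10.180.180.156'] 8950 187
```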
[ 1920.743656] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1920.744458] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3781b5d-e2f9-433d-95cd-306ce1ed4c8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.961197] env[63241]: DEBUG oslo_concurrency.lockutils [req-7df80958-de05-4b44-98b3-d7076b4dc0a9 req-709e99fc-cad3-4133-a42d-74243f0253fb service nova] Releasing lock "refresh_cache-215f658f-2af6-4525-b94c-489ad794e6f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.094049] env[63241]: DEBUG oslo_vmware.api [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821238, 'name': PowerOnVM_Task, 'duration_secs': 0.416226} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.094049] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1921.096590] env[63241]: DEBUG nova.compute.manager [None req-d814bc47-15a3-4e3d-9c12-b5732708bc06 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1921.097398] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f45510-19ae-4325-8a04-7ad05a91b9c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.141813] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.220168] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecc4cd0-8b60-46a7-ba89-476e29bc83bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.227706] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e435b1-3793-4ef0-a3a3-395a38b3f4e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.262945] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2b8559-3e62-4b65-be3f-2317dc0e95b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.267901] env[63241]: INFO nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Took 20.98 seconds to build instance. [ 1921.272355] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26655803-b1e6-41ae-8f7d-d31e9b634413 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.287196] env[63241]: DEBUG nova.compute.provider_tree [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1921.638851] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1921.639098] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377223', 'volume_id': '3db4b146-c795-4d6d-8983-4d7672e1b19d', 'name': 'volume-3db4b146-c795-4d6d-8983-4d7672e1b19d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0ea8cf6-4023-4093-b0bc-67b02604125a', 'attached_at': '', 'detached_at': '', 'volume_id': '3db4b146-c795-4d6d-8983-4d7672e1b19d', 'serial': '3db4b146-c795-4d6d-8983-4d7672e1b19d'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1921.639888] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c6cd39-1259-4484-9ec8-e026a71c2913 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.645889] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.658906] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52b8b4b-794e-4940-8cf3-243303895134 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.685938] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-3db4b146-c795-4d6d-8983-4d7672e1b19d/volume-3db4b146-c795-4d6d-8983-4d7672e1b19d.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1921.686289] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38cf51b5-7f3c-4ea1-8042-e0c3ee3342ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.703630] env[63241]: DEBUG oslo_vmware.api [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1921.703630] env[63241]: value = "task-1821239" [ 1921.703630] env[63241]: _type = "Task" [ 1921.703630] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.711440] env[63241]: DEBUG oslo_vmware.api [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821239, 'name': ReconfigVM_Task} progress is 5%. 
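
The "Waiting for the task" / "progress is N%" pairs throughout this log are oslo.vmware's task loop: the driver starts an asynchronous vCenter task (CreateVM_Task, ReconfigVM_Task, PowerOnVM_Task, ...) and wait_for_task() polls it until it succeeds or fails. A rough sketch of that call pattern; the host, credentials, vm_ref and the exact constructor keyword names are assumptions, not taken from this deployment:

    # Sketch: kick off a vCenter task through oslo.vmware and block on it,
    # mirroring the wait_for_task/_poll_task lines above.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',   # placeholders
        api_retry_count=10,
        task_poll_interval=0.5)                     # poll cadence used by _poll_task

    # vm_ref: a VirtualMachine managed-object reference obtained elsewhere.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Polls the task (the "progress is N%" lines) and raises on error;
    # returns the TaskInfo once the task reaches the 'success' state.
    task_info = session.wait_for_task(task)
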
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.769774] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.489s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.810266] env[63241]: ERROR nova.scheduler.client.report [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [req-2f692a8f-bd8f-438b-948f-df7b2d203f42] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2f692a8f-bd8f-438b-948f-df7b2d203f42"}]} [ 1921.824992] env[63241]: DEBUG nova.scheduler.client.report [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1921.836795] env[63241]: DEBUG nova.scheduler.client.report [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1921.837015] env[63241]: DEBUG nova.compute.provider_tree [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1921.847616] env[63241]: DEBUG nova.scheduler.client.report [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 
tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1921.862774] env[63241]: DEBUG nova.scheduler.client.report [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1922.000349] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241ba9a1-f35f-451b-8cf7-f6b68c7e6416 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.007904] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666f4617-d360-4853-8030-8ec020ad613c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.039058] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffd692f-5c4a-41c9-a43a-18e2253a0a7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.046683] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5deb4d4a-0182-4c88-aedf-9ae9c234be50 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.059620] env[63241]: DEBUG nova.compute.provider_tree [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1922.142519] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.213076] env[63241]: DEBUG oslo_vmware.api [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821239, 'name': ReconfigVM_Task, 'duration_secs': 0.35011} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.213367] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-3db4b146-c795-4d6d-8983-4d7672e1b19d/volume-3db4b146-c795-4d6d-8983-4d7672e1b19d.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1922.218063] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d991ed4-71c6-46cc-abd2-6b10d36aa2a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.231491] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.231705] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.236612] env[63241]: DEBUG oslo_vmware.api [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1922.236612] env[63241]: value = "task-1821240" [ 1922.236612] env[63241]: _type = "Task" [ 1922.236612] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.245010] env[63241]: DEBUG oslo_vmware.api [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821240, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.586848] env[63241]: DEBUG nova.scheduler.client.report [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 170 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1922.587328] env[63241]: DEBUG nova.compute.provider_tree [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 170 to 171 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1922.587685] env[63241]: DEBUG nova.compute.provider_tree [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1922.644175] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.736608] env[63241]: DEBUG nova.compute.manager [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1922.750383] env[63241]: DEBUG oslo_vmware.api [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821240, 'name': ReconfigVM_Task, 'duration_secs': 0.142095} completed successfully. 
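
The 409 "placement.concurrent_update" error above, followed by a refresh and a successful write at generation 170 -> 171, is Placement's optimistic concurrency control: every inventory PUT must carry the provider generation it was based on, and a stale generation forces the client to re-read and retry. A minimal sketch of that loop against the Placement HTTP API, with a placeholder endpoint and token:

    # Sketch: PUT inventories with the provider generation and retry on the
    # 409 "placement.concurrent_update" conflict seen in the log above.
    import requests

    PLACEMENT = 'http://placement.example.test'   # placeholder endpoint
    HEADERS = {'x-auth-token': 'TOKEN',           # placeholder token
               'openstack-api-version': 'placement 1.26'}

    def set_inventory(rp_uuid, inventories, retries=3):
        for _ in range(retries):
            # Re-read the provider to get its current generation.
            rp = requests.get(f'{PLACEMENT}/resource_providers/{rp_uuid}',
                              headers=HEADERS).json()
            body = {'resource_provider_generation': rp['generation'],
                    'inventories': inventories}
            resp = requests.put(
                f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation first; loop, refresh and try again.
        raise RuntimeError('inventory update kept conflicting')
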
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.750971] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377223', 'volume_id': '3db4b146-c795-4d6d-8983-4d7672e1b19d', 'name': 'volume-3db4b146-c795-4d6d-8983-4d7672e1b19d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0ea8cf6-4023-4093-b0bc-67b02604125a', 'attached_at': '', 'detached_at': '', 'volume_id': '3db4b146-c795-4d6d-8983-4d7672e1b19d', 'serial': '3db4b146-c795-4d6d-8983-4d7672e1b19d'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1923.094059] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.049s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.094059] env[63241]: DEBUG nova.compute.manager [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1923.145513] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.255415] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.255729] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.257451] env[63241]: INFO nova.compute.claims [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1923.599558] env[63241]: DEBUG nova.compute.utils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1923.601013] env[63241]: DEBUG nova.compute.manager [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Not allocating networking since 'none' was specified. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1923.645547] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.808116] env[63241]: DEBUG nova.objects.instance [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid c0ea8cf6-4023-4093-b0bc-67b02604125a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1924.102778] env[63241]: DEBUG nova.compute.manager [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1924.146532] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task} progress is 25%. 
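
The Acquiring / "acquired" / "released ... held N s" lines around "compute_resources" and the instance UUIDs come from oslo.concurrency's lockutils, which serialises the resource-tracker claim and each instance build. The two idioms it logs look roughly like this (the decorated functions are placeholders, not nova code):

    # Sketch: the two lockutils idioms behind the "Acquiring lock" /
    # "Lock ... acquired" / "released" lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Runs with the "compute_resources" lock held; lockutils logs the
        # waited/held durations exactly as seen in this log.
        pass

    def build_instance(instance_uuid):
        # Per-instance lock, analogous to the
        # _locked_do_build_and_run_instance lock in the log.
        with lockutils.lock(instance_uuid):
            pass
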
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.314126] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ee212e28-a35e-467c-88a3-3d035694e521 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.286s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.414730] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd3068d-1fcd-4634-a97f-b4e914f5b2b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.422222] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ba617f-f961-491f-b83e-84e4d79f3287 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.454998] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6735536-47c3-4cd8-b267-b97d77314e2f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.463143] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7737abca-a9de-4a91-9711-93f599f27d67 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.477267] env[63241]: DEBUG nova.compute.provider_tree [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1924.556719] env[63241]: DEBUG oslo_concurrency.lockutils [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.557019] env[63241]: DEBUG oslo_concurrency.lockutils [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.647399] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821223, 'name': CreateVM_Task, 'duration_secs': 8.387117} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.647558] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1924.648244] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.648411] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.648726] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1924.649014] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb2bcd8d-f5f1-4e4b-aebe-71cde612f793 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.653928] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1924.653928] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52817bd2-f9ce-f1ac-a41f-8947582a52b7" [ 1924.653928] env[63241]: _type = "Task" [ 1924.653928] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.662868] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52817bd2-f9ce-f1ac-a41f-8947582a52b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.980387] env[63241]: DEBUG nova.scheduler.client.report [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1925.059760] env[63241]: INFO nova.compute.manager [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Detaching volume 3db1f9f8-4a70-4324-92e6-c2279747c925 [ 1925.112421] env[63241]: DEBUG nova.compute.manager [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1925.164824] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52817bd2-f9ce-f1ac-a41f-8947582a52b7, 'name': SearchDatastore_Task, 'duration_secs': 0.009665} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.165219] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.165669] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1925.165710] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.165837] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.166041] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1925.166315] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c526b3d2-7125-4b45-af17-4c665c894d26 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.169620] env[63241]: INFO nova.virt.block_device [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Attempting to driver detach volume 3db1f9f8-4a70-4324-92e6-c2279747c925 from mountpoint /dev/sdb [ 1925.169843] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1925.170042] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377216', 'volume_id': '3db1f9f8-4a70-4324-92e6-c2279747c925', 'name': 'volume-3db1f9f8-4a70-4324-92e6-c2279747c925', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0ea8cf6-4023-4093-b0bc-67b02604125a', 'attached_at': '', 'detached_at': '', 'volume_id': '3db1f9f8-4a70-4324-92e6-c2279747c925', 'serial': '3db1f9f8-4a70-4324-92e6-c2279747c925'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1925.170810] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46f5456-19e5-433a-b99f-2273aed9495f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.174886] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1925.175095] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1925.195608] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac8d4597-c9ca-4e1d-bd4e-9a1a9d8b7064 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.200138] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65c1b08-dcde-4ece-85b0-ea157e2f4a97 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.207852] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c472db7-ec83-421c-afe0-7e4468d5eeb8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.210347] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1925.210347] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]520944ae-47cf-9b10-08a1-1bbeab337e8b" [ 1925.210347] env[63241]: _type = "Task" [ 1925.210347] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.212421] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1925.212669] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1925.212833] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1925.213024] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1925.213175] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1925.213323] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1925.213546] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1925.213694] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1925.213861] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 
tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1925.214032] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1925.214219] env[63241]: DEBUG nova.virt.hardware [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1925.215009] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f873a64-9861-4385-badd-12966a381360 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.238799] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0287e701-9092-430d-9a2b-ee7016ab8230 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.246257] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]520944ae-47cf-9b10-08a1-1bbeab337e8b, 'name': SearchDatastore_Task, 'duration_secs': 0.008958} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.248205] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8301b41-cdf8-4b56-b646-c9bb33db14d9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.261421] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b5812d5-e6ec-4ff4-b496-1d359c794117 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.263813] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] The volume has not been displaced from its original location: [datastore1] volume-3db1f9f8-4a70-4324-92e6-c2279747c925/volume-3db1f9f8-4a70-4324-92e6-c2279747c925.vmdk. No consolidation needed. 
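
The nova.virt.hardware lines above enumerate every sockets:cores:threads split of the flavor's vCPU count that fits the flavor and image limits, then sort them by preference; with m1.nano's single vCPU the only candidate is 1:1:1. A much-simplified sketch of that enumeration (the real code also honours preferred topologies and NUMA constraints):

    # Sketch: enumerate possible CPU topologies for a vCPU count, in the
    # spirit of _get_possible_cpu_topologies (greatly simplified).
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append(
                            VirtCPUTopology(sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # -> [VirtCPUTopology(1, 1, 1)], as logged
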
{{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1925.269399] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1925.269998] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-542f1309-145f-49e0-82ce-24dbd074d5b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.290603] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1925.296074] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Creating folder: Project (05f29b598a4147e9b10e6d9ad0e29772). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1925.297630] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0c421d6-719a-41a2-b823-e4048bd7b208 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.299251] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1925.299251] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d896fd-8e08-cb05-2685-4aaedaec219f" [ 1925.299251] env[63241]: _type = "Task" [ 1925.299251] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.303336] env[63241]: DEBUG oslo_vmware.api [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1925.303336] env[63241]: value = "task-1821241" [ 1925.303336] env[63241]: _type = "Task" [ 1925.303336] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.311046] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d896fd-8e08-cb05-2685-4aaedaec219f, 'name': SearchDatastore_Task, 'duration_secs': 0.009546} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.311573] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.311832] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] eb8e453e-76bf-4489-9a5f-9b15e03cd6ba/eb8e453e-76bf-4489-9a5f-9b15e03cd6ba.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1925.312066] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed6d11eb-e020-4187-aebf-57e837a57bf2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.318860] env[63241]: DEBUG oslo_vmware.api [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821241, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.319637] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Created folder: Project (05f29b598a4147e9b10e6d9ad0e29772) in parent group-v376927. [ 1925.319821] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Creating folder: Instances. Parent ref: group-v377224. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1925.320031] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fa0b3db-4eaa-49c4-808b-9adac6b8ff5a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.324413] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1925.324413] env[63241]: value = "task-1821243" [ 1925.324413] env[63241]: _type = "Task" [ 1925.324413] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.332425] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821243, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.333665] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Created folder: Instances in parent group-v377224. [ 1925.333929] env[63241]: DEBUG oslo.service.loopingcall [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1925.334179] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1925.334398] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e6502e0-875c-4da2-a829-4aeed022762e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.351790] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1925.351790] env[63241]: value = "task-1821245" [ 1925.351790] env[63241]: _type = "Task" [ 1925.351790] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.359757] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821245, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.486033] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.230s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.487032] env[63241]: DEBUG nova.compute.manager [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1925.814896] env[63241]: DEBUG oslo_vmware.api [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821241, 'name': ReconfigVM_Task, 'duration_secs': 0.233932} completed successfully. 
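
The attach/detach operations above ("Reconfiguring VM instance ... to detach disk 2001") are all ReconfigVM_Task calls whose config spec carries a single device change. Nova builds that spec through oslo.vmware's suds client; an equivalent spec written with pyVmomi, purely for illustration, looks like this:

    # Sketch: a device-change spec for removing a volume-backed disk,
    # comparable to the ReconfigVM_Task detach seen above (pyVmomi is used
    # here only for readability; nova itself goes through oslo.vmware).
    from pyVmomi import vim

    def detach_disk_spec(disk_device):
        # disk_device: the vim.vm.device.VirtualDisk to remove (placeholder).
        change = vim.vm.device.VirtualDeviceSpec()
        change.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove
        # fileOperation is left unset so the backing .vmdk is preserved,
        # which is what a Cinder volume detach needs.
        change.device = disk_device
        spec = vim.vm.ConfigSpec()
        spec.deviceChange = [change]
        return spec

    # vm.ReconfigVM_Task(spec=detach_disk_spec(disk)), then wait for the task.
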
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.815167] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1925.820363] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cae5e5f8-c3cf-47bf-9fd1-93d6690e57a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.840558] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441093} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.841206] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] eb8e453e-76bf-4489-9a5f-9b15e03cd6ba/eb8e453e-76bf-4489-9a5f-9b15e03cd6ba.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1925.841461] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1925.841797] env[63241]: DEBUG oslo_vmware.api [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1925.841797] env[63241]: value = "task-1821246" [ 1925.841797] env[63241]: _type = "Task" [ 1925.841797] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.842022] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e002869a-390a-4b2d-88e7-5d6b41cc490a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.853231] env[63241]: DEBUG oslo_vmware.api [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821246, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.857498] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1925.857498] env[63241]: value = "task-1821247" [ 1925.857498] env[63241]: _type = "Task" [ 1925.857498] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.864602] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821245, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.869534] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821247, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.992462] env[63241]: DEBUG nova.compute.utils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1925.994603] env[63241]: DEBUG nova.compute.manager [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1925.994603] env[63241]: DEBUG nova.network.neutron [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1926.045424] env[63241]: DEBUG nova.policy [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c54558668b9d43bd9adc17fce71df03e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f7af6cf881f84203a7f0a546466bf76f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1926.355795] env[63241]: DEBUG oslo_vmware.api [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821246, 'name': ReconfigVM_Task, 'duration_secs': 0.138645} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.359325] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377216', 'volume_id': '3db1f9f8-4a70-4324-92e6-c2279747c925', 'name': 'volume-3db1f9f8-4a70-4324-92e6-c2279747c925', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0ea8cf6-4023-4093-b0bc-67b02604125a', 'attached_at': '', 'detached_at': '', 'volume_id': '3db1f9f8-4a70-4324-92e6-c2279747c925', 'serial': '3db1f9f8-4a70-4324-92e6-c2279747c925'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1926.369728] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821245, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.372817] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821247, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070836} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.373133] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1926.374450] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67595175-6daa-4d03-81e7-61d71a0d6a32 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.397446] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] eb8e453e-76bf-4489-9a5f-9b15e03cd6ba/eb8e453e-76bf-4489-9a5f-9b15e03cd6ba.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1926.397940] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baf2c7fe-2f1f-49fd-8eba-581bfbe593f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.421614] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1926.421614] env[63241]: value = "task-1821248" [ 1926.421614] env[63241]: _type = "Task" [ 1926.421614] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.429624] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821248, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.497564] env[63241]: DEBUG nova.compute.manager [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1926.546274] env[63241]: DEBUG nova.network.neutron [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Successfully created port: 5fc0cb3d-1a42-482e-a23f-9864a91d89c0 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1926.865421] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821245, 'name': CreateVM_Task, 'duration_secs': 1.40912} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.865596] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1926.866055] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.866221] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.866555] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1926.866815] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9f70ded-8e8f-4dc1-a8cc-bca27790294f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.872561] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1926.872561] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5263b2bc-617d-1e69-7190-36494fb16809" [ 1926.872561] env[63241]: _type = 
"Task" [ 1926.872561] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.880801] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5263b2bc-617d-1e69-7190-36494fb16809, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.904860] env[63241]: DEBUG nova.objects.instance [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid c0ea8cf6-4023-4093-b0bc-67b02604125a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1926.932480] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821248, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.387046] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5263b2bc-617d-1e69-7190-36494fb16809, 'name': SearchDatastore_Task, 'duration_secs': 0.010479} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.387363] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.387584] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1927.387808] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.387954] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.388171] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 
tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1927.388424] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b8d7a12-5ba9-4e03-a3bb-a03f97d37c54 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.396755] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1927.397083] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1927.397644] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1477334e-795c-453a-a7e0-cdaacd1604b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.403196] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1927.403196] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52970acf-d4d5-d1be-5818-361daf83f9e0" [ 1927.403196] env[63241]: _type = "Task" [ 1927.403196] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.412245] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52970acf-d4d5-d1be-5818-361daf83f9e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.431603] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821248, 'name': ReconfigVM_Task, 'duration_secs': 0.841482} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.431924] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Reconfigured VM instance instance-00000072 to attach disk [datastore1] eb8e453e-76bf-4489-9a5f-9b15e03cd6ba/eb8e453e-76bf-4489-9a5f-9b15e03cd6ba.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1927.432574] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-423421ce-0f85-4e26-aa9a-91347c13c2b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.438426] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1927.438426] env[63241]: value = "task-1821249" [ 1927.438426] env[63241]: _type = "Task" [ 1927.438426] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.450016] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821249, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.508265] env[63241]: DEBUG nova.compute.manager [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1927.535842] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1927.536141] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1927.536323] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1927.536514] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1927.536662] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1927.536813] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1927.537036] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1927.537208] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1927.537380] env[63241]: DEBUG nova.virt.hardware [None 
req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1927.537543] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1927.537718] env[63241]: DEBUG nova.virt.hardware [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1927.538602] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad803fb1-1051-4c13-b0d9-e3b1be520140 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.547008] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535d19ca-4391-49cf-93a1-6d5504bcf40f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.882992] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.913411] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52970acf-d4d5-d1be-5818-361daf83f9e0, 'name': SearchDatastore_Task, 'duration_secs': 0.008808} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.914388] env[63241]: DEBUG oslo_concurrency.lockutils [None req-23804f96-2a2a-4ab1-b030-dfb84574080d tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.357s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.915357] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69aef71a-f010-4b32-85c9-11ded845f58c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.917726] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.035s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.921723] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1927.921723] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527bdbbf-d893-6634-b0ca-6be8760c2717" [ 1927.921723] env[63241]: _type = "Task" [ 1927.921723] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.929604] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527bdbbf-d893-6634-b0ca-6be8760c2717, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.947693] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821249, 'name': Rename_Task, 'duration_secs': 0.335124} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.947961] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1927.948277] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a934d42-f8d1-4a1c-b932-9758f9904f8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.954540] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1927.954540] env[63241]: value = "task-1821250" [ 1927.954540] env[63241]: _type = "Task" [ 1927.954540] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.962339] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821250, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.007232] env[63241]: DEBUG nova.compute.manager [req-1fbcfadc-d4fe-40f5-a5d7-fbebdd68f2ff req-5d4d57d0-c48c-4ffd-a0c5-f624a62ea7ab service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Received event network-vif-plugged-5fc0cb3d-1a42-482e-a23f-9864a91d89c0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1928.007533] env[63241]: DEBUG oslo_concurrency.lockutils [req-1fbcfadc-d4fe-40f5-a5d7-fbebdd68f2ff req-5d4d57d0-c48c-4ffd-a0c5-f624a62ea7ab service nova] Acquiring lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.007767] env[63241]: DEBUG oslo_concurrency.lockutils [req-1fbcfadc-d4fe-40f5-a5d7-fbebdd68f2ff req-5d4d57d0-c48c-4ffd-a0c5-f624a62ea7ab service nova] Lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.007962] env[63241]: DEBUG oslo_concurrency.lockutils [req-1fbcfadc-d4fe-40f5-a5d7-fbebdd68f2ff req-5d4d57d0-c48c-4ffd-a0c5-f624a62ea7ab service nova] Lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.008178] env[63241]: DEBUG nova.compute.manager [req-1fbcfadc-d4fe-40f5-a5d7-fbebdd68f2ff req-5d4d57d0-c48c-4ffd-a0c5-f624a62ea7ab service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] No waiting events found dispatching network-vif-plugged-5fc0cb3d-1a42-482e-a23f-9864a91d89c0 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1928.008365] env[63241]: WARNING 
nova.compute.manager [req-1fbcfadc-d4fe-40f5-a5d7-fbebdd68f2ff req-5d4d57d0-c48c-4ffd-a0c5-f624a62ea7ab service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Received unexpected event network-vif-plugged-5fc0cb3d-1a42-482e-a23f-9864a91d89c0 for instance with vm_state building and task_state spawning. [ 1928.151368] env[63241]: DEBUG nova.network.neutron [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Successfully updated port: 5fc0cb3d-1a42-482e-a23f-9864a91d89c0 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1928.422105] env[63241]: INFO nova.compute.manager [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Detaching volume 3db4b146-c795-4d6d-8983-4d7672e1b19d [ 1928.437083] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527bdbbf-d893-6634-b0ca-6be8760c2717, 'name': SearchDatastore_Task, 'duration_secs': 0.010487} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.437083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.437207] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4/f55ed224-90d4-4fdc-bd78-d1cfb9f641e4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1928.437411] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3921a70e-e785-4311-b43b-86cd4818e470 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.444832] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1928.444832] env[63241]: value = "task-1821251" [ 1928.444832] env[63241]: _type = "Task" [ 1928.444832] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.452711] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821251, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.458962] env[63241]: INFO nova.virt.block_device [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Attempting to driver detach volume 3db4b146-c795-4d6d-8983-4d7672e1b19d from mountpoint /dev/sdc [ 1928.459247] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Volume detach. Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1928.459442] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377223', 'volume_id': '3db4b146-c795-4d6d-8983-4d7672e1b19d', 'name': 'volume-3db4b146-c795-4d6d-8983-4d7672e1b19d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0ea8cf6-4023-4093-b0bc-67b02604125a', 'attached_at': '', 'detached_at': '', 'volume_id': '3db4b146-c795-4d6d-8983-4d7672e1b19d', 'serial': '3db4b146-c795-4d6d-8983-4d7672e1b19d'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1928.460213] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5802a51-063c-4bd1-a7dd-528bb9696e9f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.467627] env[63241]: DEBUG oslo_vmware.api [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821250, 'name': PowerOnVM_Task, 'duration_secs': 0.482126} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.483890] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1928.484142] env[63241]: INFO nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Took 14.87 seconds to spawn the instance on the hypervisor. 
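The Rename_Task and PowerOnVM_Task records above all follow the same oslo.vmware pattern: the driver invokes an asynchronous vCenter task method and then polls it until completion, which is what produces the repeated "Task: {...} progress is N%" lines. Purely as an illustrative sketch of that calling pattern (the vCenter host, credentials, and managed-object reference value below are placeholders, not taken from this log):

# Illustrative sketch only: invoke a vCenter task and poll it with oslo.vmware.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Arguments: host, username, password, API retry count, task poll interval.
session = vmware_api.VMwareAPISession(
    'vc1.example.invalid', 'user', 'secret', 10, 0.5)

# Build a managed object reference for the target VM (placeholder value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start the asynchronous power-on task on the vCenter side...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ...then block while oslo.vmware polls it; this polling is the source of
# the "progress is N%" debug lines and the final "completed successfully".
session.wait_for_task(task)

In the surrounding records, nova.virt.vmwareapi.vm_util.power_on_instance() performs this same invoke-then-wait sequence ("Powering on the VM" followed by "Powered on the VM").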
[ 1928.484327] env[63241]: DEBUG nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1928.485419] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d168c8b-5c56-4eb7-ae2b-e3ad64843adb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.490128] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da730d4-ef95-4a1e-8c1f-a607cd2232c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.501022] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18eed9c-8c8a-404e-a128-9db42bfc1bcf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.522804] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c15453-fda7-4512-8b3a-1a879a373edc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.539139] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] The volume has not been displaced from its original location: [datastore1] volume-3db4b146-c795-4d6d-8983-4d7672e1b19d/volume-3db4b146-c795-4d6d-8983-4d7672e1b19d.vmdk. No consolidation needed. {{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1928.544679] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfiguring VM instance instance-0000006a to detach disk 2002 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1928.545068] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-560af715-b455-48c8-addd-7fd47f5a4997 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.564186] env[63241]: DEBUG oslo_vmware.api [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1928.564186] env[63241]: value = "task-1821252" [ 1928.564186] env[63241]: _type = "Task" [ 1928.564186] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.572761] env[63241]: DEBUG oslo_vmware.api [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821252, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.654628] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.654628] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.654628] env[63241]: DEBUG nova.network.neutron [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1928.954707] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445363} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.954917] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4/f55ed224-90d4-4fdc-bd78-d1cfb9f641e4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1928.955146] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1928.955395] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-888feb57-1344-42fb-8932-0354b7f076d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.961901] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1928.961901] env[63241]: value = "task-1821253" [ 1928.961901] env[63241]: _type = "Task" [ 1928.961901] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.969212] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821253, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.008341] env[63241]: INFO nova.compute.manager [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Took 28.69 seconds to build instance. [ 1929.074375] env[63241]: DEBUG oslo_vmware.api [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821252, 'name': ReconfigVM_Task, 'duration_secs': 0.421099} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.074649] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Reconfigured VM instance instance-0000006a to detach disk 2002 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1929.079229] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e522d989-3423-4e3c-a819-9ddc6f9e0b9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.094318] env[63241]: DEBUG oslo_vmware.api [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1929.094318] env[63241]: value = "task-1821254" [ 1929.094318] env[63241]: _type = "Task" [ 1929.094318] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.102110] env[63241]: DEBUG oslo_vmware.api [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821254, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.202064] env[63241]: DEBUG nova.network.neutron [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1929.353702] env[63241]: DEBUG nova.network.neutron [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Updating instance_info_cache with network_info: [{"id": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "address": "fa:16:3e:65:38:e9", "network": {"id": "93c05514-d892-481d-9774-124866cb4462", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-742911692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f7af6cf881f84203a7f0a546466bf76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc0cb3d-1a", "ovs_interfaceid": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1929.472207] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821253, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063432} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.472501] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1929.473320] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410bea3c-12a3-41ea-b660-279fd0750463 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.493312] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4/f55ed224-90d4-4fdc-bd78-d1cfb9f641e4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1929.493892] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab8bc147-3821-4523-b84b-58b7a04c7b0e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.509869] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f02a4803-e2aa-454d-b1df-a7e5ff67593b tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.199s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.513594] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1929.513594] env[63241]: value = "task-1821255" [ 1929.513594] env[63241]: _type = "Task" [ 1929.513594] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.521304] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821255, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.604378] env[63241]: DEBUG oslo_vmware.api [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821254, 'name': ReconfigVM_Task, 'duration_secs': 0.151867} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.604732] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377223', 'volume_id': '3db4b146-c795-4d6d-8983-4d7672e1b19d', 'name': 'volume-3db4b146-c795-4d6d-8983-4d7672e1b19d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c0ea8cf6-4023-4093-b0bc-67b02604125a', 'attached_at': '', 'detached_at': '', 'volume_id': '3db4b146-c795-4d6d-8983-4d7672e1b19d', 'serial': '3db4b146-c795-4d6d-8983-4d7672e1b19d'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1929.751460] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.751695] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.751886] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.752083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.752257] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.754518] env[63241]: INFO nova.compute.manager [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Terminating instance [ 1929.756260] 
env[63241]: DEBUG nova.compute.manager [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1929.756453] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1929.757296] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b458a601-b29b-447d-bfd3-e5c8c4aecdeb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.764931] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1929.765186] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86a8bcb3-a36f-4128-a18a-01fb3ea0831c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.771197] env[63241]: DEBUG oslo_vmware.api [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1929.771197] env[63241]: value = "task-1821256" [ 1929.771197] env[63241]: _type = "Task" [ 1929.771197] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.778936] env[63241]: DEBUG oslo_vmware.api [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821256, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.833864] env[63241]: DEBUG oslo_concurrency.lockutils [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.834231] env[63241]: DEBUG oslo_concurrency.lockutils [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.834467] env[63241]: DEBUG oslo_concurrency.lockutils [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.834668] env[63241]: DEBUG oslo_concurrency.lockutils [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.834847] env[63241]: DEBUG oslo_concurrency.lockutils [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.837080] env[63241]: INFO nova.compute.manager [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Terminating instance [ 1929.838901] env[63241]: DEBUG nova.compute.manager [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1929.839111] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1929.839956] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d98997-071e-4059-9079-707a94123754 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.847663] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1929.847898] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26047503-4689-4942-b75c-936a6dd4c618 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.854412] env[63241]: DEBUG oslo_vmware.api [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1929.854412] env[63241]: value = "task-1821257" [ 1929.854412] env[63241]: _type = "Task" [ 1929.854412] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.857848] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.858155] env[63241]: DEBUG nova.compute.manager [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Instance network_info: |[{"id": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "address": "fa:16:3e:65:38:e9", "network": {"id": "93c05514-d892-481d-9774-124866cb4462", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-742911692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f7af6cf881f84203a7f0a546466bf76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc0cb3d-1a", "ovs_interfaceid": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1929.858536] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:38:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9ec24851-7bb6-426b-b28f-f7b246df1713', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fc0cb3d-1a42-482e-a23f-9864a91d89c0', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1929.866476] env[63241]: DEBUG oslo.service.loopingcall [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1929.867179] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1929.867424] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52d3a035-c4ef-46cf-9326-1710e663d757 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.885206] env[63241]: DEBUG oslo_vmware.api [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821257, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.890967] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1929.890967] env[63241]: value = "task-1821258" [ 1929.890967] env[63241]: _type = "Task" [ 1929.890967] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.898280] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821258, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.023676] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821255, 'name': ReconfigVM_Task, 'duration_secs': 0.277939} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.023979] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Reconfigured VM instance instance-00000073 to attach disk [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4/f55ed224-90d4-4fdc-bd78-d1cfb9f641e4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1930.024607] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e4b735b-c928-446a-9aa5-767f90210af0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.030955] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1930.030955] env[63241]: value = "task-1821259" [ 1930.030955] env[63241]: _type = "Task" [ 1930.030955] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.039352] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821259, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.041271] env[63241]: DEBUG nova.compute.manager [req-d9621d50-c64a-41a2-8e26-5f56a64d7db2 req-2e7a7eb0-c0d3-465e-85b0-c6103d357956 service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Received event network-changed-5fc0cb3d-1a42-482e-a23f-9864a91d89c0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1930.041451] env[63241]: DEBUG nova.compute.manager [req-d9621d50-c64a-41a2-8e26-5f56a64d7db2 req-2e7a7eb0-c0d3-465e-85b0-c6103d357956 service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Refreshing instance network info cache due to event network-changed-5fc0cb3d-1a42-482e-a23f-9864a91d89c0. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1930.041657] env[63241]: DEBUG oslo_concurrency.lockutils [req-d9621d50-c64a-41a2-8e26-5f56a64d7db2 req-2e7a7eb0-c0d3-465e-85b0-c6103d357956 service nova] Acquiring lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.041800] env[63241]: DEBUG oslo_concurrency.lockutils [req-d9621d50-c64a-41a2-8e26-5f56a64d7db2 req-2e7a7eb0-c0d3-465e-85b0-c6103d357956 service nova] Acquired lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.041959] env[63241]: DEBUG nova.network.neutron [req-d9621d50-c64a-41a2-8e26-5f56a64d7db2 req-2e7a7eb0-c0d3-465e-85b0-c6103d357956 service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Refreshing network info cache for port 5fc0cb3d-1a42-482e-a23f-9864a91d89c0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1930.152465] env[63241]: DEBUG nova.objects.instance [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'flavor' on Instance uuid c0ea8cf6-4023-4093-b0bc-67b02604125a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1930.281122] env[63241]: DEBUG oslo_vmware.api [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821256, 'name': PowerOffVM_Task, 'duration_secs': 0.380461} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.281371] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1930.281554] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1930.281819] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b140fe4-7307-42f2-9db4-1b53d7cc9b83 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.364860] env[63241]: DEBUG oslo_vmware.api [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821257, 'name': PowerOffVM_Task, 'duration_secs': 0.29845} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.364860] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1930.365042] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1930.365246] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0151992a-4b32-43db-8123-c807146d196c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.400509] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821258, 'name': CreateVM_Task, 'duration_secs': 0.352246} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.400797] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1930.401512] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.401678] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.402051] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1930.402405] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd5c6772-1a6b-4d62-a09e-e968757d2b98 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.404954] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1930.405190] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 
tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1930.405367] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleting the datastore file [datastore1] c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1930.406299] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9917c132-a3e2-4434-9cc9-277f840029a4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.409426] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1930.409426] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5280eaa7-62b2-a1bf-1d12-76a1baa3b8d7" [ 1930.409426] env[63241]: _type = "Task" [ 1930.409426] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.413604] env[63241]: DEBUG oslo_vmware.api [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1930.413604] env[63241]: value = "task-1821262" [ 1930.413604] env[63241]: _type = "Task" [ 1930.413604] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.419575] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5280eaa7-62b2-a1bf-1d12-76a1baa3b8d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.424100] env[63241]: DEBUG oslo_vmware.api [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821262, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.442650] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1930.442895] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1930.443096] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleting the datastore file [datastore1] eb8e453e-76bf-4489-9a5f-9b15e03cd6ba {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1930.443351] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ff80243-a3e8-4ec6-b16f-a8077c60d98c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.450480] env[63241]: DEBUG oslo_vmware.api [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for the task: (returnval){ [ 1930.450480] env[63241]: value = "task-1821263" [ 1930.450480] env[63241]: _type = "Task" [ 1930.450480] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.458678] env[63241]: DEBUG oslo_vmware.api [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821263, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.541164] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821259, 'name': Rename_Task, 'duration_secs': 0.189481} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.541453] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1930.541701] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e59c9a4e-0510-420a-8b03-eea07b181c09 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.550250] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1930.550250] env[63241]: value = "task-1821264" [ 1930.550250] env[63241]: _type = "Task" [ 1930.550250] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.558613] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821264, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.735076] env[63241]: DEBUG nova.network.neutron [req-d9621d50-c64a-41a2-8e26-5f56a64d7db2 req-2e7a7eb0-c0d3-465e-85b0-c6103d357956 service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Updated VIF entry in instance network info cache for port 5fc0cb3d-1a42-482e-a23f-9864a91d89c0. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1930.735636] env[63241]: DEBUG nova.network.neutron [req-d9621d50-c64a-41a2-8e26-5f56a64d7db2 req-2e7a7eb0-c0d3-465e-85b0-c6103d357956 service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Updating instance_info_cache with network_info: [{"id": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "address": "fa:16:3e:65:38:e9", "network": {"id": "93c05514-d892-481d-9774-124866cb4462", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-742911692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f7af6cf881f84203a7f0a546466bf76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc0cb3d-1a", "ovs_interfaceid": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.923424] env[63241]: DEBUG oslo_vmware.api [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136132} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.927495] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1930.927646] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1930.927752] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1930.927968] env[63241]: INFO nova.compute.manager [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Took 1.17 seconds to destroy the instance on the hypervisor. 
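The cache refresh above re-prints the full network_info blob for port 5fc0cb3d-1a42-482e-a23f-9864a91d89c0, and the earlier build_virtual_machine line reduced the same structure to a compact "Instance VIF info" dict (network_name, mac_address, an OpaqueNetwork reference keyed by the NSX logical-switch id, iface_id, vif_model). Below is a minimal sketch of that reduction in plain Python, using only field names visible in the log; the helper is illustrative and is not Nova's actual code path.

```python
# Reduce one neutron network_info entry (as printed in the log) to the
# "Instance VIF info" dict logged before CreateVM_Task. Field names come
# straight from the log output; the function itself is illustrative only.
def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    details = vif.get("details", {})
    nsx_switch = details.get("nsx-logical-switch-id")
    if nsx_switch:
        # NSX-backed ports are addressed as an opaque network on the vCenter side.
        network_ref = {
            "type": "OpaqueNetwork",
            "network-id": nsx_switch,
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        }
    else:
        # Assumed fallback for non-NSX ports: reference the bridge by name.
        network_ref = {"type": "Network", "name": vif["network"]["bridge"]}
    return {
        "network_name": vif["network"]["bridge"],  # e.g. "br-int"
        "mac_address": vif["address"],             # e.g. "fa:16:3e:65:38:e9"
        "network_ref": network_ref,
        "iface_id": vif["id"],                     # the neutron port UUID
        "vif_model": vif_model,
    }
```

Feeding it the entry shown above reproduces the VIF info dict that vmops logged for instance dfbe2f8b-e750-45b6-bc90-5021b3c0e267.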
[ 1930.928232] env[63241]: DEBUG oslo.service.loopingcall [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1930.928445] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5280eaa7-62b2-a1bf-1d12-76a1baa3b8d7, 'name': SearchDatastore_Task, 'duration_secs': 0.010028} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.928653] env[63241]: DEBUG nova.compute.manager [-] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1930.928744] env[63241]: DEBUG nova.network.neutron [-] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1930.930485] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1930.930735] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1930.930980] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.931139] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.931325] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1930.931578] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afa17596-0f0f-4970-b0b9-5e9c69128c2e {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.940477] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1930.940652] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1930.941389] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c30d7607-2284-4e3d-8379-101811e4f426 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.946445] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1930.946445] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52fc481a-7839-7007-c8e9-8ac62ea9bcd3" [ 1930.946445] env[63241]: _type = "Task" [ 1930.946445] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.957951] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fc481a-7839-7007-c8e9-8ac62ea9bcd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.962876] env[63241]: DEBUG oslo_vmware.api [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Task: {'id': task-1821263, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140329} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.964922] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1930.965151] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1930.965353] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1930.965530] env[63241]: INFO nova.compute.manager [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1930.965766] env[63241]: DEBUG oslo.service.loopingcall [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1930.965964] env[63241]: DEBUG nova.compute.manager [-] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1930.966076] env[63241]: DEBUG nova.network.neutron [-] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1931.060898] env[63241]: DEBUG oslo_vmware.api [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821264, 'name': PowerOnVM_Task, 'duration_secs': 0.416781} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.061113] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1931.061321] env[63241]: INFO nova.compute.manager [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Took 5.95 seconds to spawn the instance on the hypervisor. 
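Every vCenter operation in this trace (PowerOffVM_Task, CreateVM_Task, PowerOnVM_Task, DeleteDatastoreFile_Task, ...) returns a Task managed object, and the recurring "Waiting for the task ... progress is N% ... completed successfully" rhythm is oslo_vmware's wait_for_task polling that task on a fixed interval through oslo.service. A rough sketch of the polling pattern follows; read_task_info is a hypothetical callable standing in for the PropertyCollector read, and the real oslo_vmware poller additionally handles task errors and cancellation.

```python
from oslo_service import loopingcall

def wait_for_task(read_task_info, poll_interval=0.5):
    """Block until a vCenter task leaves its running states.

    read_task_info is a hypothetical callable returning an object with
    .state ('queued' | 'running' | 'success' | 'error'), .progress and
    .error; oslo_vmware obtains the same fields via the PropertyCollector.
    """
    def _poll():
        info = read_task_info()
        if info.state in ("queued", "running"):
            return  # keep polling; the log shows this as "progress is N%"
        if info.state == "success":
            # Stop the looping call; the value comes back from .wait() below.
            raise loopingcall.LoopingCallDone(info)
        raise RuntimeError(info.error)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()
```

The PowerOnVM_Task above, for example, went from "progress is 0%" to completion after 0.416781s of exactly this kind of polling.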
[ 1931.061501] env[63241]: DEBUG nova.compute.manager [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1931.062336] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a12e0cb-5f00-48d9-a64d-afd08ffb1fcf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.159521] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3def626f-bc38-4054-accb-4eefcc970565 tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.242s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.238175] env[63241]: DEBUG oslo_concurrency.lockutils [req-d9621d50-c64a-41a2-8e26-5f56a64d7db2 req-2e7a7eb0-c0d3-465e-85b0-c6103d357956 service nova] Releasing lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.458173] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52fc481a-7839-7007-c8e9-8ac62ea9bcd3, 'name': SearchDatastore_Task, 'duration_secs': 0.009692} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.460039] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da268e35-a833-4133-990c-67a8d65e2f65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.465459] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1931.465459] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527bd9b1-cada-eab1-f5ef-2b5224db9caa" [ 1931.465459] env[63241]: _type = "Task" [ 1931.465459] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.473657] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527bd9b1-cada-eab1-f5ef-2b5224db9caa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.578029] env[63241]: INFO nova.compute.manager [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Took 11.57 seconds to build instance. 
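The Acquiring/Acquired/Releasing lines throughout this trace, including the Lock "c0ea8cf6-..." "released" ... :: held 3.242s entry just above, are emitted by oslo.concurrency's lockutils. Two forms appear here: a with-block around short critical sections such as the "refresh_cache-<instance uuid>" lookups, and a decorator whose wrapper logs the acquired by "<function>" :: waited / held timings. A small sketch of both follows, with illustrative lock names and empty bodies rather than Nova's real call sites.

```python
from oslo_concurrency import lockutils

INSTANCE_UUID = "dfbe2f8b-e750-45b6-bc90-5021b3c0e267"  # uuid taken from the log

# Context-manager form: produces the plain Acquiring/Acquired/Releasing DEBUG
# lines (lockutils.py:310/313/331 in this trace).
def refresh_cache(instance_uuid):
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # rebuild the instance_info_cache while holding the lock

# Decorator form: produces the 'Lock "..." acquired by "<function>" :: waited'
# and '"released" by "<function>" :: held' lines (lockutils.py:402/407/421).
@lockutils.synchronized("compute_resources")
def update_usage():
    pass  # resource-tracker bookkeeping runs under the named lock
```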
[ 1931.788782] env[63241]: DEBUG nova.network.neutron [-] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.792621] env[63241]: DEBUG nova.network.neutron [-] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.977516] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527bd9b1-cada-eab1-f5ef-2b5224db9caa, 'name': SearchDatastore_Task, 'duration_secs': 0.009053} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.977516] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.977720] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] dfbe2f8b-e750-45b6-bc90-5021b3c0e267/dfbe2f8b-e750-45b6-bc90-5021b3c0e267.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1931.977844] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e46d7465-ac94-44cf-b833-5ee4faed3806 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.983987] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1931.983987] env[63241]: value = "task-1821265" [ 1931.983987] env[63241]: _type = "Task" [ 1931.983987] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.991472] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821265, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.066205] env[63241]: DEBUG nova.compute.manager [req-e740d54e-581c-4d90-afc6-f0c54662294f req-2b44b1ff-ebdf-4e30-bea2-27a30be8faed service nova] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Received event network-vif-deleted-1a0ff10e-14d3-4b93-b20e-f76c3b7e1207 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1932.066456] env[63241]: DEBUG nova.compute.manager [req-e740d54e-581c-4d90-afc6-f0c54662294f req-2b44b1ff-ebdf-4e30-bea2-27a30be8faed service nova] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Received event network-vif-deleted-5695a20c-9e6c-4223-bc78-d5a80286154f {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1932.079931] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e1b2ac27-c931-4f08-a683-58bfe6e42152 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.089s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.101243] env[63241]: INFO nova.compute.manager [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Rebuilding instance [ 1932.145474] env[63241]: DEBUG nova.compute.manager [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1932.146628] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d4ce03-3130-4bea-aec2-e4a23248e1c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.271659] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.271861] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.272089] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "c0ea8cf6-4023-4093-b0bc-67b02604125a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.272278] env[63241]: DEBUG 
oslo_concurrency.lockutils [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.272450] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.275357] env[63241]: INFO nova.compute.manager [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Terminating instance [ 1932.277407] env[63241]: DEBUG nova.compute.manager [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1932.277610] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1932.278529] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bd9a3e-9d45-4b87-86d9-3155ad4193cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.287514] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1932.287796] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28d6315c-1341-41f7-8f5f-53a1fe891581 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.291126] env[63241]: INFO nova.compute.manager [-] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Took 1.32 seconds to deallocate network for instance. [ 1932.296074] env[63241]: INFO nova.compute.manager [-] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Took 1.37 seconds to deallocate network for instance. [ 1932.299195] env[63241]: DEBUG oslo_vmware.api [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1932.299195] env[63241]: value = "task-1821266" [ 1932.299195] env[63241]: _type = "Task" [ 1932.299195] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.311268] env[63241]: DEBUG oslo_vmware.api [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821266, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.494228] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821265, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.414197} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.494502] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] dfbe2f8b-e750-45b6-bc90-5021b3c0e267/dfbe2f8b-e750-45b6-bc90-5021b3c0e267.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1932.494718] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1932.494985] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3c5c36b-243f-40d5-826a-5a5497111f51 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.503129] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1932.503129] env[63241]: value = "task-1821267" [ 1932.503129] env[63241]: _type = "Task" [ 1932.503129] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.510764] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821267, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.659046] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1932.659381] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55760b68-d133-4d12-8399-4ea2d7a6b180 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.666137] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1932.666137] env[63241]: value = "task-1821268" [ 1932.666137] env[63241]: _type = "Task" [ 1932.666137] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.674496] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821268, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.799149] env[63241]: DEBUG oslo_concurrency.lockutils [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.799439] env[63241]: DEBUG oslo_concurrency.lockutils [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.799666] env[63241]: DEBUG nova.objects.instance [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lazy-loading 'resources' on Instance uuid eb8e453e-76bf-4489-9a5f-9b15e03cd6ba {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1932.806400] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.811764] env[63241]: DEBUG oslo_vmware.api [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821266, 'name': PowerOffVM_Task, 'duration_secs': 0.230473} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.812015] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1932.812198] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1932.812435] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4556dc2a-0121-4722-8e31-8ab6c0ae575c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.982973] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1932.983255] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1932.983485] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Deleting the datastore file [datastore1] c0ea8cf6-4023-4093-b0bc-67b02604125a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1932.983735] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59e08a3e-27d6-4572-84fa-ff5cc724cbc6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.990227] env[63241]: DEBUG oslo_vmware.api [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for the task: (returnval){ [ 1932.990227] env[63241]: value = "task-1821270" [ 1932.990227] env[63241]: _type = "Task" [ 1932.990227] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.998081] env[63241]: DEBUG oslo_vmware.api [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821270, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.011285] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821267, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072179} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.011564] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1933.012360] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ff716b-c05f-4c98-ba65-b312bd95f7ec {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.035356] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] dfbe2f8b-e750-45b6-bc90-5021b3c0e267/dfbe2f8b-e750-45b6-bc90-5021b3c0e267.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1933.035700] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3cb695a-ab86-4545-8778-5dcd44d1e4c0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.056276] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1933.056276] env[63241]: value = "task-1821271" [ 1933.056276] env[63241]: _type = "Task" [ 1933.056276] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.064592] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821271, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.178626] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821268, 'name': PowerOffVM_Task, 'duration_secs': 0.177693} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.178998] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1933.179345] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1933.180456] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefa3fce-dfb9-4b38-a3c9-e20ef1f9f837 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.187532] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1933.187769] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bc8df23-e45d-45da-8f2f-ba2feb93ede5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.210288] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1933.210501] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1933.210770] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Deleting the datastore file [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1933.211208] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df886c80-2ffa-4e1a-add5-5180de9e3d81 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.217873] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1933.217873] env[63241]: value = "task-1821273" [ 1933.217873] env[63241]: _type = "Task" [ 1933.217873] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.226355] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821273, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.464705] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaf164b-f2ba-4d44-932b-66362074dd88 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.473068] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c518fa2d-23ca-4e97-90cd-d2e244ba012a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.509485] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8950a8e-721b-46b1-85f5-1f3fc74163ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.519665] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88fcb4c-370d-4e91-9372-8e8e77db47da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.523483] env[63241]: DEBUG oslo_vmware.api [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Task: {'id': task-1821270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137127} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.523696] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1933.523904] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1933.524071] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1933.524257] env[63241]: INFO nova.compute.manager [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Took 1.25 seconds to destroy the instance on the hypervisor. 
[ 1933.524492] env[63241]: DEBUG oslo.service.loopingcall [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1933.524964] env[63241]: DEBUG nova.compute.manager [-] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1933.525082] env[63241]: DEBUG nova.network.neutron [-] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1933.534514] env[63241]: DEBUG nova.compute.provider_tree [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1933.567488] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821271, 'name': ReconfigVM_Task, 'duration_secs': 0.277582} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.567586] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Reconfigured VM instance instance-00000074 to attach disk [datastore1] dfbe2f8b-e750-45b6-bc90-5021b3c0e267/dfbe2f8b-e750-45b6-bc90-5021b3c0e267.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1933.568168] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e17ba64f-94ac-4ff4-999c-5aa109d95926 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.574780] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1933.574780] env[63241]: value = "task-1821274" [ 1933.574780] env[63241]: _type = "Task" [ 1933.574780] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.586938] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821274, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.727177] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821273, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089109} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.727454] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1933.727640] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1933.727817] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1934.040413] env[63241]: DEBUG nova.scheduler.client.report [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1934.086599] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821274, 'name': Rename_Task, 'duration_secs': 0.159418} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.086886] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1934.087166] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba9b4f40-7f16-41d6-9238-92d7a709743d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.091465] env[63241]: DEBUG nova.compute.manager [req-139936cf-da58-4319-a254-92daaffd70ca req-2e0e77e4-0fd8-4f94-a120-3214c8e60d38 service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Received event network-vif-deleted-8a7a1e02-257b-4a7c-936b-c3d9f7596043 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1934.091648] env[63241]: INFO nova.compute.manager [req-139936cf-da58-4319-a254-92daaffd70ca req-2e0e77e4-0fd8-4f94-a120-3214c8e60d38 service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Neutron deleted interface 8a7a1e02-257b-4a7c-936b-c3d9f7596043; detaching it from the instance and deleting it from the info cache [ 1934.091858] env[63241]: DEBUG nova.network.neutron [req-139936cf-da58-4319-a254-92daaffd70ca req-2e0e77e4-0fd8-4f94-a120-3214c8e60d38 service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.101615] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1934.101615] env[63241]: value = "task-1821275" [ 1934.101615] env[63241]: _type = "Task" [ 1934.101615] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.113872] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821275, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.513071] env[63241]: DEBUG nova.network.neutron [-] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.545871] env[63241]: DEBUG oslo_concurrency.lockutils [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.746s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.548390] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.742s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.548635] env[63241]: DEBUG nova.objects.instance [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lazy-loading 'resources' on Instance uuid c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1934.565448] env[63241]: INFO nova.scheduler.client.report [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleted allocations for instance eb8e453e-76bf-4489-9a5f-9b15e03cd6ba [ 1934.596721] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ffb70cc9-20f5-4545-bf4f-05d185c57c1b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.609545] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e53c55-7485-418a-a95e-437eddbb0cdf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.626075] env[63241]: DEBUG oslo_vmware.api [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821275, 'name': PowerOnVM_Task, 'duration_secs': 0.429785} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.626352] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1934.626551] env[63241]: INFO nova.compute.manager [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Took 7.12 seconds to spawn the instance on the hypervisor. 
[ 1934.626727] env[63241]: DEBUG nova.compute.manager [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1934.627484] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e840c4d1-6353-4f22-b198-a68a3147cf80 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.640493] env[63241]: DEBUG nova.compute.manager [req-139936cf-da58-4319-a254-92daaffd70ca req-2e0e77e4-0fd8-4f94-a120-3214c8e60d38 service nova] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Detach interface failed, port_id=8a7a1e02-257b-4a7c-936b-c3d9f7596043, reason: Instance c0ea8cf6-4023-4093-b0bc-67b02604125a could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1934.764734] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1934.765024] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1934.765163] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1934.765353] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1934.765499] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1934.765646] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1934.765860] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1934.766031] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1934.766208] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1934.766368] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1934.766537] env[63241]: DEBUG nova.virt.hardware [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1934.767414] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d77a0c-a09d-4abf-a6fc-e2cb3bd7c85f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.775450] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2366c4c9-5dc0-47b6-b101-736a46ba9aab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.788811] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Instance VIF info [] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1934.794567] env[63241]: DEBUG oslo.service.loopingcall [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1934.794824] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1934.795042] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f590a8c3-ea67-46cc-9588-9a507b6a4b90 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.811869] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1934.811869] env[63241]: value = "task-1821276" [ 1934.811869] env[63241]: _type = "Task" [ 1934.811869] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.820058] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821276, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.016116] env[63241]: INFO nova.compute.manager [-] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Took 1.49 seconds to deallocate network for instance. [ 1935.073937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-34c400c0-d54c-4936-989e-47fa87df2ddb tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "eb8e453e-76bf-4489-9a5f-9b15e03cd6ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.240s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.154821] env[63241]: INFO nova.compute.manager [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Took 11.91 seconds to build instance. 
[ 1935.202060] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f60232-ac71-4dd9-9410-2f23509fc6cf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.210822] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e01d96-e0fe-4bd6-ab67-0fe4aa3abebc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.242074] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7702db72-edec-47f5-a49f-16d2336ba716 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.249503] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c1be92-3051-40ca-bf53-79beeb43a474 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.263113] env[63241]: DEBUG nova.compute.provider_tree [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1935.321486] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821276, 'name': CreateVM_Task, 'duration_secs': 0.240434} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.321655] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1935.322045] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.322207] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.322531] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1935.322764] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82a93b84-5656-4279-8ccb-280309d48651 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.327504] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 
tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1935.327504] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5281fa0a-f4c7-7fb6-8448-32d8f27d14d7" [ 1935.327504] env[63241]: _type = "Task" [ 1935.327504] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.334861] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5281fa0a-f4c7-7fb6-8448-32d8f27d14d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.501500] env[63241]: INFO nova.compute.manager [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Rescuing [ 1935.501783] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.501939] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.502127] env[63241]: DEBUG nova.network.neutron [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1935.521491] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.657180] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f686f838-0fe5-4ebe-a853-6f348e5b829b tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.425s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.765737] env[63241]: DEBUG nova.scheduler.client.report [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 
1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1935.838032] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5281fa0a-f4c7-7fb6-8448-32d8f27d14d7, 'name': SearchDatastore_Task, 'duration_secs': 0.010174} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.838436] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1935.838738] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1935.839053] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.839283] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.839547] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1935.839831] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6e52e79-4f5c-4d55-88f2-f3a9e080456d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.848257] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1935.848415] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1935.849125] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8347b71b-a265-4738-82e2-15c63ebd220e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.853889] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1935.853889] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52267a67-7aae-8e95-f03e-0aee39182d48" [ 1935.853889] env[63241]: _type = "Task" [ 1935.853889] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.861182] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52267a67-7aae-8e95-f03e-0aee39182d48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.271140] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.723s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.273788] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.752s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.274140] env[63241]: DEBUG nova.objects.instance [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lazy-loading 'resources' on Instance uuid c0ea8cf6-4023-4093-b0bc-67b02604125a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1936.298016] env[63241]: INFO nova.scheduler.client.report [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Deleted allocations for instance c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a [ 1936.341228] env[63241]: DEBUG nova.network.neutron [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Updating instance_info_cache with network_info: [{"id": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "address": "fa:16:3e:65:38:e9", "network": {"id": "93c05514-d892-481d-9774-124866cb4462", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-742911692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f7af6cf881f84203a7f0a546466bf76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc0cb3d-1a", "ovs_interfaceid": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.367856] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52267a67-7aae-8e95-f03e-0aee39182d48, 'name': SearchDatastore_Task, 'duration_secs': 0.008111} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.369299] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0289a5ca-bcb6-4988-baf3-d2975ff34872 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.375783] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1936.375783] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52429551-40d5-afb5-2b7b-95212cf5d5a5" [ 1936.375783] env[63241]: _type = "Task" [ 1936.375783] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.387789] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52429551-40d5-afb5-2b7b-95212cf5d5a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009796} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.388045] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.388296] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4/f55ed224-90d4-4fdc-bd78-d1cfb9f641e4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1936.388555] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de92b06a-1b6c-45ec-a1e3-74cecf4d7b8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.395492] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1936.395492] env[63241]: value = "task-1821277" [ 1936.395492] env[63241]: _type = "Task" [ 1936.395492] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.402427] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821277, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.812307] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9cd565bc-de72-41a8-bed4-e489274e4756 tempest-MultipleCreateTestJSON-2124798476 tempest-MultipleCreateTestJSON-2124798476-project-member] Lock "c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.060s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.844468] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.907493] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821277, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45802} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.907761] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4/f55ed224-90d4-4fdc-bd78-d1cfb9f641e4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1936.907972] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1936.908246] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-418c40af-026e-4389-be87-3b8665d818dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.911148] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3ee69d-1c1e-4b3b-acf5-4b4c27ed8566 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.919509] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7442fc6e-6451-4df0-a220-bcd662b0107b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.922805] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1936.922805] env[63241]: value = "task-1821278" [ 1936.922805] env[63241]: _type = "Task" [ 1936.922805] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.950665] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df473ef-9021-4c21-b341-df79e64fe70d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.955900] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821278, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.960849] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7563f34-af83-4a26-85ad-1d539c4e4867 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.975978] env[63241]: DEBUG nova.compute.provider_tree [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1937.376299] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1937.376516] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50f6d351-5638-42a8-bd66-cca46a811d42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.385078] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1937.385078] env[63241]: value = "task-1821279" [ 1937.385078] env[63241]: _type = "Task" [ 1937.385078] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.393853] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.433678] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821278, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074612} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.433678] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1937.434392] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4887787-d42e-44e6-a510-43c5cae0b5d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.455051] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4/f55ed224-90d4-4fdc-bd78-d1cfb9f641e4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1937.455051] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77ead6ad-3a86-442f-8f56-d075eb23259b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.474887] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1937.474887] env[63241]: value = "task-1821280" [ 1937.474887] env[63241]: _type = "Task" [ 1937.474887] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.481076] env[63241]: DEBUG nova.scheduler.client.report [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1937.486489] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821280, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.895839] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821279, 'name': PowerOffVM_Task, 'duration_secs': 0.344428} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.896222] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1937.896955] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45db4d99-30d3-4ff2-9d46-9ec621b617a7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.916522] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c471ec7b-e727-44fa-9ecb-c82097bfd448 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.949325] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1937.949695] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c2cf8ad-ae93-4339-9c84-3d2de650c711 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.956248] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1937.956248] env[63241]: value = "task-1821281" [ 1937.956248] env[63241]: _type = "Task" [ 1937.956248] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.966378] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1937.966615] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1937.966897] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.967095] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.967309] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1937.967558] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a7f5f15-e543-4fe8-a558-9070a48af752 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.975764] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1937.975942] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1937.979220] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c17f7224-55bf-44ee-8fba-6c3f93bddbab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.985762] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821280, 'name': ReconfigVM_Task, 'duration_secs': 0.480718} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.986936] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Reconfigured VM instance instance-00000073 to attach disk [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4/f55ed224-90d4-4fdc-bd78-d1cfb9f641e4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1937.987594] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1937.987594] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e3d9e2-bdaa-9b0e-56ac-49ac89cda9f9" [ 1937.987594] env[63241]: _type = "Task" [ 1937.987594] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.988237] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.989932] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-154adcbb-cd1c-45fe-95ee-d62ffbe21b39 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.999408] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e3d9e2-bdaa-9b0e-56ac-49ac89cda9f9, 'name': SearchDatastore_Task, 'duration_secs': 0.011306} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.001850] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1938.001850] env[63241]: value = "task-1821282" [ 1938.001850] env[63241]: _type = "Task" [ 1938.001850] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.001850] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95fe43b8-1efa-42cf-bd16-fa81b0399cca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.013808] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821282, 'name': Rename_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.014816] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1938.014816] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c9ec58-5fb5-a8a3-c39e-7817b42e1834" [ 1938.014816] env[63241]: _type = "Task" [ 1938.014816] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.023080] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c9ec58-5fb5-a8a3-c39e-7817b42e1834, 'name': SearchDatastore_Task, 'duration_secs': 0.009969} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.024115] env[63241]: INFO nova.scheduler.client.report [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Deleted allocations for instance c0ea8cf6-4023-4093-b0bc-67b02604125a [ 1938.025197] env[63241]: DEBUG oslo_concurrency.lockutils [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.025477] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] dfbe2f8b-e750-45b6-bc90-5021b3c0e267/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. 
{{(pid=63241) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1938.027970] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c3c19fb-150d-41a3-9159-87b61be13208 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.035147] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1938.035147] env[63241]: value = "task-1821283" [ 1938.035147] env[63241]: _type = "Task" [ 1938.035147] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.043461] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821283, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.516993] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821282, 'name': Rename_Task, 'duration_secs': 0.134579} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.516993] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1938.517469] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98fa5548-f5e4-42a0-b57f-d3ae6d88a805 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.524143] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1938.524143] env[63241]: value = "task-1821284" [ 1938.524143] env[63241]: _type = "Task" [ 1938.524143] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.534694] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821284, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.537023] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6a6efe1f-fbac-4d69-a24e-a8f76806d51c tempest-AttachVolumeTestJSON-901364898 tempest-AttachVolumeTestJSON-901364898-project-member] Lock "c0ea8cf6-4023-4093-b0bc-67b02604125a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.263s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1938.545202] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821283, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438316} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.545640] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] dfbe2f8b-e750-45b6-bc90-5021b3c0e267/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk. [ 1938.549190] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5831d558-ad4e-4448-8ea1-26cf98d76622 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.578799] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] dfbe2f8b-e750-45b6-bc90-5021b3c0e267/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1938.580124] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af227ed1-8712-45e8-b259-71f21b6ec2e6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.600401] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1938.600401] env[63241]: value = "task-1821285" [ 1938.600401] env[63241]: _type = "Task" [ 1938.600401] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.610084] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821285, 'name': ReconfigVM_Task} progress is 6%. 
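[editor's note] The copy-then-attach entries (CopyVirtualDisk_Task into <instance>/<image>-rescue.vmdk, then ReconfigVM_Task with type thin, later PowerOnVM_Task) amount to a fixed three-step plan. A sketch of that ordering, with illustrative argument shapes rather than the real ds_util/volumeops signatures:

def plan_rescue_disk(instance_uuid, image_id, datastore="datastore1"):
    cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    rescue = f"[{datastore}] {instance_uuid}/{image_id}-rescue.vmdk"
    # Ordered vSphere tasks as they appear above; each is followed by a
    # wait_for_task in the real flow.
    return [
        ("CopyVirtualDisk_Task", {"source": cached, "dest": rescue}),
        ("ReconfigVM_Task", {"attach": rescue, "disk_type": "thin"}),
        ("PowerOnVM_Task", {}),
    ]

# e.g. plan_rescue_disk("dfbe2f8b-e750-45b6-bc90-5021b3c0e267",
#                       "e128f8d9-813d-4846-9a6e-b4c4717cd5b4")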
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.033551] env[63241]: DEBUG oslo_vmware.api [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821284, 'name': PowerOnVM_Task, 'duration_secs': 0.493514} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.033884] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1939.034034] env[63241]: DEBUG nova.compute.manager [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1939.034800] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450509da-a9cd-417e-90e1-a1f35c2a3168 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.111142] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821285, 'name': ReconfigVM_Task, 'duration_secs': 0.369708} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.111449] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Reconfigured VM instance instance-00000074 to attach disk [datastore1] dfbe2f8b-e750-45b6-bc90-5021b3c0e267/e128f8d9-813d-4846-9a6e-b4c4717cd5b4-rescue.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1939.112345] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51a9b86-9dde-4d05-b419-44787d0ffd32 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.138680] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fda8c5b-2a04-4aff-a961-2260f455aeaa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.155154] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1939.155154] env[63241]: value = "task-1821287" [ 1939.155154] env[63241]: _type = "Task" [ 1939.155154] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.163288] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821287, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.553154] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.553435] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.553666] env[63241]: DEBUG nova.objects.instance [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1939.666776] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821287, 'name': ReconfigVM_Task} progress is 99%. 
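[editor's note] The repeating "Waiting for the task ... progress is N% ... completed successfully" triplets are a plain polling loop. The sketch below is not oslo.vmware's actual implementation, only the shape of the loop; get_task_info is a hypothetical accessor for the task state:

import time

def poll_until_done(get_task_info, task_id, poll_interval=0.5):
    # get_task_info(task_id) is assumed to return something like
    # {'state': 'running', 'progress': 42} or {'state': 'success'}.
    while True:
        info = get_task_info(task_id)
        if info["state"] == "success":
            return info                       # "completed successfully"
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # Matches the periodic "progress is N%" debug entries.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)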
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.762675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.763356] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.763440] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.764101] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.764101] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.765925] env[63241]: INFO nova.compute.manager [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Terminating instance [ 1939.767637] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "refresh_cache-f55ed224-90d4-4fdc-bd78-d1cfb9f641e4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.767798] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquired lock "refresh_cache-f55ed224-90d4-4fdc-bd78-d1cfb9f641e4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.768109] env[63241]: DEBUG nova.network.neutron [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 
tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1940.165737] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821287, 'name': ReconfigVM_Task, 'duration_secs': 0.522622} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.166036] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1940.166292] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cdde101-e795-4652-9ab0-af187bd9819e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.172896] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1940.172896] env[63241]: value = "task-1821288" [ 1940.172896] env[63241]: _type = "Task" [ 1940.172896] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.180458] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821288, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.285600] env[63241]: DEBUG nova.network.neutron [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1940.343464] env[63241]: DEBUG nova.network.neutron [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.563165] env[63241]: DEBUG oslo_concurrency.lockutils [None req-124bff1c-9d54-4e0b-9ae4-0795c5f8cf10 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.682955] env[63241]: DEBUG oslo_vmware.api [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821288, 'name': PowerOnVM_Task, 'duration_secs': 0.47037} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.683312] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1940.686012] env[63241]: DEBUG nova.compute.manager [None req-41b7f41e-4f83-4bd9-8fd7-f4e83dd34b24 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1940.686836] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b068bbf7-f60b-4646-8c60-b117eb11f95f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.846713] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Releasing lock "refresh_cache-f55ed224-90d4-4fdc-bd78-d1cfb9f641e4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.847387] env[63241]: DEBUG nova.compute.manager [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1940.847699] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1940.849366] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ddceb5-9fd6-4468-b8be-f2f18080325f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.859761] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1940.860078] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-825e2d9f-10fc-4bab-94d0-8ad11adca753 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.867303] env[63241]: DEBUG oslo_vmware.api [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1940.867303] env[63241]: value = "task-1821289" [ 1940.867303] env[63241]: _type = "Task" [ 1940.867303] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.881079] env[63241]: DEBUG oslo_vmware.api [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821289, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.378693] env[63241]: INFO nova.compute.manager [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Unrescuing [ 1941.378693] env[63241]: DEBUG oslo_concurrency.lockutils [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1941.378693] env[63241]: DEBUG oslo_concurrency.lockutils [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquired lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1941.378693] env[63241]: DEBUG nova.network.neutron [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1941.379893] env[63241]: DEBUG oslo_vmware.api [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821289, 'name': PowerOffVM_Task, 'duration_secs': 0.161048} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.380245] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1941.380416] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1941.380665] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3bf83b7d-281e-4206-be1d-5df86b78e5bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.405409] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1941.405666] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1941.405850] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Deleting the datastore file [datastore1] f55ed224-90d4-4fdc-bd78-d1cfb9f641e4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1941.406111] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31a70515-448d-4fbd-83c7-32a027a5f529 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.412526] env[63241]: DEBUG oslo_vmware.api [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for the task: (returnval){ [ 1941.412526] env[63241]: value = "task-1821291" [ 1941.412526] env[63241]: _type = "Task" [ 1941.412526] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.420988] env[63241]: DEBUG oslo_vmware.api [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821291, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.922132] env[63241]: DEBUG oslo_vmware.api [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Task: {'id': task-1821291, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091522} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.922394] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1941.922587] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1941.922883] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1941.923269] env[63241]: INFO nova.compute.manager [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1941.923670] env[63241]: DEBUG oslo.service.loopingcall [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1941.923989] env[63241]: DEBUG nova.compute.manager [-] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1941.923989] env[63241]: DEBUG nova.network.neutron [-] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1941.941121] env[63241]: DEBUG nova.network.neutron [-] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1942.112384] env[63241]: DEBUG nova.network.neutron [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Updating instance_info_cache with network_info: [{"id": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "address": "fa:16:3e:65:38:e9", "network": {"id": "93c05514-d892-481d-9774-124866cb4462", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-742911692-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f7af6cf881f84203a7f0a546466bf76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fc0cb3d-1a", "ovs_interfaceid": "5fc0cb3d-1a42-482e-a23f-9864a91d89c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.444457] env[63241]: DEBUG nova.network.neutron [-] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.615536] env[63241]: DEBUG oslo_concurrency.lockutils [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Releasing lock "refresh_cache-dfbe2f8b-e750-45b6-bc90-5021b3c0e267" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1942.616258] env[63241]: DEBUG nova.objects.instance [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lazy-loading 'flavor' on Instance uuid dfbe2f8b-e750-45b6-bc90-5021b3c0e267 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1942.947490] env[63241]: INFO nova.compute.manager [-] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Took 1.02 seconds to deallocate network for instance. 
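[editor's note] Between task-1821289 and task-1821291 the terminate path runs a fixed order: power off, unregister, delete the instance directory from the datastore, then hand networking back to Neutron. A sketch of that ordering, where invoke is a hypothetical dispatcher standing in for "issue the vSphere call and wait for its task":

def destroy_instance(invoke, instance_uuid, datastore="datastore1"):
    invoke("PowerOffVM_Task", instance_uuid)
    invoke("UnregisterVM", instance_uuid)
    invoke("DeleteDatastoreFile_Task", f"[{datastore}] {instance_uuid}")
    invoke("deallocate_for_instance", instance_uuid)   # Neutron-side cleanup

# Dry run, printing the steps instead of calling vSphere:
# destroy_instance(print, "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4")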
[ 1943.121906] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9987304-a637-4514-af16-984ace0e2bab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.144686] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1943.144993] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48ffb3da-e851-45da-ba36-cdcdee55d9ab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.151291] env[63241]: DEBUG oslo_vmware.api [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1943.151291] env[63241]: value = "task-1821293" [ 1943.151291] env[63241]: _type = "Task" [ 1943.151291] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.159806] env[63241]: DEBUG oslo_vmware.api [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821293, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.454467] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.454830] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.454964] env[63241]: DEBUG nova.objects.instance [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lazy-loading 'resources' on Instance uuid f55ed224-90d4-4fdc-bd78-d1cfb9f641e4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1943.661554] env[63241]: DEBUG oslo_vmware.api [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821293, 'name': PowerOffVM_Task, 'duration_secs': 0.224194} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.661883] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1943.667344] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Reconfiguring VM instance instance-00000074 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1943.667651] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89d87f6f-0516-419f-baa6-cf194ed2ea6f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.685765] env[63241]: DEBUG oslo_vmware.api [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1943.685765] env[63241]: value = "task-1821294" [ 1943.685765] env[63241]: _type = "Task" [ 1943.685765] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.693317] env[63241]: DEBUG oslo_vmware.api [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821294, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.064174] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7158ac36-ce62-4a55-9310-1bd48a64dd99 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.071876] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052d1d34-dab7-4408-bc47-0c8199385eed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.101038] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40953739-5608-412d-b6e4-34ede20e8887 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.108143] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76012d5c-5a03-4a0a-823d-6813c7c1a828 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.121129] env[63241]: DEBUG nova.compute.provider_tree [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1944.197224] env[63241]: DEBUG oslo_vmware.api [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821294, 'name': ReconfigVM_Task, 'duration_secs': 0.207344} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.197492] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Reconfigured VM instance instance-00000074 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1944.197679] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1944.197908] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92a222ab-ff73-40ff-8068-429b9c8c7066 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.204335] env[63241]: DEBUG oslo_vmware.api [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1944.204335] env[63241]: value = "task-1821295" [ 1944.204335] env[63241]: _type = "Task" [ 1944.204335] env[63241]: } to complete. 
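[editor's note] Unrescue reverses the earlier attach: power the VM off, detach the rescue disk (the "detach disk 2001" ReconfigVM_Task), and power it back on. One way to pick the right device, assuming a hypothetical mapping of VirtualDisk key to backing path (not Nova's actual selection logic):

def find_rescue_disk(devices, image_id):
    # `devices` maps disk key (e.g. 2001) to its backing file path; the shape
    # is hypothetical, but the "-rescue.vmdk" suffix matches the disk the
    # rescue flow attached above.
    suffix = f"{image_id}-rescue.vmdk"
    for key, backing in devices.items():
        if backing.endswith(suffix):
            return key
    return None

# e.g. with 2000 -> the instance's own VMDK and 2001 -> the
# "...-rescue.vmdk" backing (full paths elided), this returns 2001.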
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.211641] env[63241]: DEBUG oslo_vmware.api [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.624655] env[63241]: DEBUG nova.scheduler.client.report [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1944.714779] env[63241]: DEBUG oslo_vmware.api [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821295, 'name': PowerOnVM_Task, 'duration_secs': 0.389046} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.715059] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1944.715288] env[63241]: DEBUG nova.compute.manager [None req-30835419-2898-477d-9da4-e17abe744302 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1944.716102] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf844949-5c24-468a-86f3-684791aee032 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.131080] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.151569] env[63241]: INFO nova.scheduler.client.report [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Deleted allocations for instance f55ed224-90d4-4fdc-bd78-d1cfb9f641e4 [ 1945.658480] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f55fe271-a3e2-4aa7-9dfe-7041edf2e5d1 tempest-ServerShowV257Test-333174980 tempest-ServerShowV257Test-333174980-project-member] Lock "f55ed224-90d4-4fdc-bd78-d1cfb9f641e4" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.895s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.986542] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.986823] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.987046] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.987237] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.987407] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.990899] env[63241]: INFO nova.compute.manager [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Terminating instance [ 1945.992876] env[63241]: DEBUG nova.compute.manager [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1945.993073] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1945.993972] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8787e5-d6bb-47f9-87bd-cc669363c4a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.001959] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1946.002215] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68638e3c-a123-40e2-a47d-eeac92d9d1b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.009033] env[63241]: DEBUG oslo_vmware.api [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1946.009033] env[63241]: value = "task-1821297" [ 1946.009033] env[63241]: _type = "Task" [ 1946.009033] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.018242] env[63241]: DEBUG oslo_vmware.api [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.518653] env[63241]: DEBUG oslo_vmware.api [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821297, 'name': PowerOffVM_Task, 'duration_secs': 0.157765} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.518963] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1946.519091] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1946.519343] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb155d8e-0296-443c-baed-ef4c465ed027 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.541511] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.541717] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1946.582760] env[63241]: DEBUG oslo_concurrency.lockutils [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.583009] env[63241]: DEBUG oslo_concurrency.lockutils [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.047300] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1947.047664] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1947.086260] env[63241]: INFO nova.compute.manager [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Detaching volume ddd89002-2b7d-4b7a-a368-0b2fe43b975a [ 1947.117464] env[63241]: INFO nova.virt.block_device [None req-787910e8-cc05-442d-be00-9a024b1492e4 
tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Attempting to driver detach volume ddd89002-2b7d-4b7a-a368-0b2fe43b975a from mountpoint /dev/sdb [ 1947.117710] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Volume detach. Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1947.117900] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1947.118806] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe36d16-664d-4404-95bc-5ac3c28a3a3a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.140951] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12906041-c962-4773-b9b8-7bf29cfd314d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.148281] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d7543f-b93f-45ad-a55e-2768c0d614b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.167866] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afe6763-7d2e-4ab8-8975-f1cfe76e54b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.181842] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] The volume has not been displaced from its original location: [datastore1] volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a/volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a.vmdk. No consolidation needed. 
{{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1947.187088] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfiguring VM instance instance-00000066 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1947.187347] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c80ecf7-6fba-46da-ae01-a9c141b846eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.204811] env[63241]: DEBUG oslo_vmware.api [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1947.204811] env[63241]: value = "task-1821299" [ 1947.204811] env[63241]: _type = "Task" [ 1947.204811] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.212222] env[63241]: DEBUG oslo_vmware.api [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821299, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.714748] env[63241]: DEBUG oslo_vmware.api [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821299, 'name': ReconfigVM_Task, 'duration_secs': 0.229332} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.715036] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Reconfigured VM instance instance-00000066 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1947.719663] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91eae9b2-9f49-4c2f-9163-6373dc231b23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.734261] env[63241]: DEBUG oslo_vmware.api [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1947.734261] env[63241]: value = "task-1821300" [ 1947.734261] env[63241]: _type = "Task" [ 1947.734261] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.743620] env[63241]: DEBUG oslo_vmware.api [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821300, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.955195] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1947.955354] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1947.955579] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Deleting the datastore file [datastore1] dfbe2f8b-e750-45b6-bc90-5021b3c0e267 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1947.955867] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55c054eb-245f-423c-8737-7390f9c20ed3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.962079] env[63241]: DEBUG oslo_vmware.api [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1947.962079] env[63241]: value = "task-1821301" [ 1947.962079] env[63241]: _type = "Task" [ 1947.962079] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.969449] env[63241]: DEBUG oslo_vmware.api [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821301, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.245097] env[63241]: DEBUG oslo_vmware.api [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821300, 'name': ReconfigVM_Task, 'duration_secs': 0.148823} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.245424] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377212', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'name': 'volume-ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '779d2380-be6c-4fdb-8755-10e99f8a6fd9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a', 'serial': 'ddd89002-2b7d-4b7a-a368-0b2fe43b975a'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1948.472380] env[63241]: DEBUG oslo_vmware.api [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140765} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.472613] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1948.472759] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1948.472934] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1948.473124] env[63241]: INFO nova.compute.manager [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Took 2.48 seconds to destroy the instance on the hypervisor. [ 1948.473364] env[63241]: DEBUG oslo.service.loopingcall [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1948.473587] env[63241]: DEBUG nova.compute.manager [-] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1948.473671] env[63241]: DEBUG nova.network.neutron [-] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1948.789879] env[63241]: DEBUG nova.objects.instance [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'flavor' on Instance uuid 779d2380-be6c-4fdb-8755-10e99f8a6fd9 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1948.979723] env[63241]: DEBUG nova.compute.manager [req-65732b0a-d6f6-4ccc-a21d-2313cde45d30 req-14eccb9c-4e66-40f8-862e-50dfd9ead822 service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Received event network-vif-deleted-5fc0cb3d-1a42-482e-a23f-9864a91d89c0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1948.979793] env[63241]: INFO nova.compute.manager [req-65732b0a-d6f6-4ccc-a21d-2313cde45d30 req-14eccb9c-4e66-40f8-862e-50dfd9ead822 service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Neutron deleted interface 5fc0cb3d-1a42-482e-a23f-9864a91d89c0; detaching it from the instance and deleting it from the info cache [ 1948.980024] env[63241]: DEBUG nova.network.neutron [req-65732b0a-d6f6-4ccc-a21d-2313cde45d30 req-14eccb9c-4e66-40f8-862e-50dfd9ead822 service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1949.365057] env[63241]: DEBUG nova.network.neutron [-] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1949.482940] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95524596-c847-400e-9403-c13c8bb88cfc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.493168] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37f5b1a-89ca-4bd3-9033-4845d59a2a2a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.521556] env[63241]: DEBUG nova.compute.manager [req-65732b0a-d6f6-4ccc-a21d-2313cde45d30 req-14eccb9c-4e66-40f8-862e-50dfd9ead822 service nova] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Detach interface failed, port_id=5fc0cb3d-1a42-482e-a23f-9864a91d89c0, reason: Instance dfbe2f8b-e750-45b6-bc90-5021b3c0e267 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1949.798350] env[63241]: DEBUG oslo_concurrency.lockutils [None req-787910e8-cc05-442d-be00-9a024b1492e4 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.214s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.869455] env[63241]: INFO nova.compute.manager [-] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Took 1.39 seconds to deallocate network for instance. [ 1950.375362] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.375657] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.375879] env[63241]: DEBUG nova.objects.instance [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lazy-loading 'resources' on Instance uuid dfbe2f8b-e750-45b6-bc90-5021b3c0e267 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1950.597309] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1950.597457] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.597607] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1950.837671] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.837932] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.838143] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.838332] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.838514] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.841023] env[63241]: INFO nova.compute.manager [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Terminating instance [ 1950.842963] env[63241]: DEBUG nova.compute.manager [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Start destroying the instance on the hypervisor.
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1950.843238] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1950.844348] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ce7774-96de-4504-8757-00926197d297 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.852384] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1950.852615] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90c8759a-9550-432c-9c5b-cbf2e0c1556f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.859640] env[63241]: DEBUG oslo_vmware.api [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1950.859640] env[63241]: value = "task-1821302" [ 1950.859640] env[63241]: _type = "Task" [ 1950.859640] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.867206] env[63241]: DEBUG oslo_vmware.api [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821302, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.992268] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2af5510-93f4-41dd-b0f5-298b20c35db8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.997809] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "26b75825-49c4-4870-957a-a2a76a970880" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.998048] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.002600] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c09d48-af76-4206-b02c-e070c228890b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.034936] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b723ed-4ff7-4666-b9d6-0c0ce79ccea4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.042738] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722a6e6a-1ece-4471-8c97-3eeccad57ce9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.056922] env[63241]: DEBUG nova.compute.provider_tree [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1951.264771] env[63241]: DEBUG oslo_concurrency.lockutils [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.265064] env[63241]: DEBUG oslo_concurrency.lockutils [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.265225] env[63241]: INFO nova.compute.manager [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514
tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Shelving [ 1951.373715] env[63241]: DEBUG oslo_vmware.api [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821302, 'name': PowerOffVM_Task, 'duration_secs': 0.237083} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.374170] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1951.374444] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1951.374806] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b9ed9c4-f2a8-4fa6-8df7-c07ce4a02c28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.503314] env[63241]: DEBUG nova.compute.utils [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1951.560591] env[63241]: DEBUG nova.scheduler.client.report [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1951.772258] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1951.772514] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45ef72e2-7041-4668-a96f-fb440729da75 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.808397] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", 
"bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1951.840909] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1951.840909] env[63241]: value = "task-1821304" [ 1951.840909] env[63241]: _type = "Task" [ 1951.840909] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.851139] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821304, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.852252] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1951.852450] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1951.852631] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleting the datastore file [datastore1] 779d2380-be6c-4fdb-8755-10e99f8a6fd9 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1951.852875] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c55754a3-8c50-48de-aa86-bb951afe460c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.858486] env[63241]: DEBUG oslo_vmware.api [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1951.858486] env[63241]: value = "task-1821305" [ 1951.858486] env[63241]: _type = "Task" [ 1951.858486] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.867667] env[63241]: DEBUG oslo_vmware.api [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821305, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.006702] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.008s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.065309] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.689s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.205734] env[63241]: INFO nova.scheduler.client.report [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Deleted allocations for instance dfbe2f8b-e750-45b6-bc90-5021b3c0e267 [ 1952.310783] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.310981] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1952.311199] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.311599] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.311765] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.311914] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.312065] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.312213] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.312340] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1952.312478] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.352124] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821304, 'name': PowerOffVM_Task, 'duration_secs': 0.256173} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.352429] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1952.353275] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641c21c5-5ad4-40e8-aaf9-6343b465d40c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.377081] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e20c8d-e73d-49aa-bb2e-c100db33d223 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.384580] env[63241]: DEBUG oslo_vmware.api [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172639} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.386349] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1952.386571] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1952.386790] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1952.386999] env[63241]: INFO nova.compute.manager [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Took 1.54 seconds to destroy the instance on the hypervisor. [ 1952.387281] env[63241]: DEBUG oslo.service.loopingcall [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1952.389779] env[63241]: DEBUG nova.compute.manager [-] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1952.389931] env[63241]: DEBUG nova.network.neutron [-] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1952.712917] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d1faa924-7e9e-4f78-9dd2-a64c485cd0eb tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "dfbe2f8b-e750-45b6-bc90-5021b3c0e267" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.726s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.815756] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.815901] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.816114] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.816282] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1952.817185] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9251401c-de19-4cdb-beb5-aa5a24e73329 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.826210] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379e6cfd-fbd6-448c-8000-f37eb97e30cc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.833032] env[63241]: DEBUG nova.compute.manager [req-f2633ab8-7240-4558-8486-5e8240df74af req-9bedf7b5-0f2d-49bd-bb64-9e868ec97b94 service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Received event network-vif-deleted-b434fe5e-e77d-4974-8bd4-7226a359e28d {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1952.833146] env[63241]: INFO nova.compute.manager [req-f2633ab8-7240-4558-8486-5e8240df74af req-9bedf7b5-0f2d-49bd-bb64-9e868ec97b94 service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Neutron deleted interface b434fe5e-e77d-4974-8bd4-7226a359e28d; detaching it from the 
instance and deleting it from the info cache [ 1952.833313] env[63241]: DEBUG nova.network.neutron [req-f2633ab8-7240-4558-8486-5e8240df74af req-9bedf7b5-0f2d-49bd-bb64-9e868ec97b94 service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.842933] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de97238-a0b6-430b-b4c5-22e5e26ddfe9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.852359] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421358b9-9cc5-4aa5-a7f0-161a9947cb26 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.881326] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179680MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1952.881489] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.881710] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.893511] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1952.893852] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0746fd63-00f5-41d2-a340-ea9b91ff866c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.900885] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1952.900885] env[63241]: value = "task-1821306" [ 1952.900885] env[63241]: _type = "Task" [ 1952.900885] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.909087] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821306, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.064529] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "26b75825-49c4-4870-957a-a2a76a970880" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.064891] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.065082] env[63241]: INFO nova.compute.manager [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Attaching volume dd7c0b60-1dcf-4962-be80-f4c646442775 to /dev/sdb [ 1953.100674] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fd0dfa-af09-4ec2-8c39-02b212dbfeb1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.108023] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c302e0f9-3148-4e65-bfc2-a313c55312b8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.121308] env[63241]: DEBUG nova.virt.block_device [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Updating existing volume attachment record: 961036f5-9531-4639-a535-a92b8907f5f6 {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1953.311260] env[63241]: DEBUG nova.network.neutron [-] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1953.343408] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a1bd047-876e-4f3f-a523-85caaf83db7d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.353160] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562cb4e8-4881-42e9-bb81-6609bda632e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.388460] env[63241]: DEBUG nova.compute.manager [req-f2633ab8-7240-4558-8486-5e8240df74af req-9bedf7b5-0f2d-49bd-bb64-9e868ec97b94 service nova] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Detach interface failed, port_id=b434fe5e-e77d-4974-8bd4-7226a359e28d, reason: Instance 779d2380-be6c-4fdb-8755-10e99f8a6fd9 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1953.411178] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821306, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.672765] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.673081] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.673269] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.673439] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.673630] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.675899] env[63241]: INFO nova.compute.manager [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Terminating instance [ 1953.677598] env[63241]: DEBUG nova.compute.manager [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1953.677794] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1953.678634] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ee6e4f-a837-40e3-a347-75f1970f6018 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.687355] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1953.687637] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0e11305-074e-4727-9059-56da38227962 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.694775] env[63241]: DEBUG oslo_vmware.api [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1953.694775] env[63241]: value = "task-1821308" [ 1953.694775] env[63241]: _type = "Task" [ 1953.694775] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.702926] env[63241]: DEBUG oslo_vmware.api [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821308, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.814936] env[63241]: INFO nova.compute.manager [-] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Took 1.42 seconds to deallocate network for instance. [ 1953.912631] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821306, 'name': CreateSnapshot_Task, 'duration_secs': 0.54668} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.913829] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 31e84206-e583-4610-969e-2ccae2d0b206 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1953.913978] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e28ba013-0bc5-4edc-858d-674980bc8742 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1953.914116] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 779d2380-be6c-4fdb-8755-10e99f8a6fd9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1953.914236] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e4514260-dfcc-45a3-80d5-b5484b0b599c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1953.914351] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 26b75825-49c4-4870-957a-a2a76a970880 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1953.914464] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1953.914578] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 215f658f-2af6-4525-b94c-489ad794e6f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1953.914766] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1953.914907] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1953.917243] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1953.918629] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c55f1a-64b1-4929-92d0-b48e0bedc766 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.016739] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131ac67a-f081-47ed-95fc-2247ba01292c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.025302] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae7f527-0fed-4cad-89ce-58dc80880400 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.058990] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f16e65d-7959-4233-91d0-fefbddcbb782 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.067671] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2751813-c233-4243-b34f-eb15ccd008f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.083813] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1954.206101] env[63241]: DEBUG oslo_vmware.api [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821308, 'name': PowerOffVM_Task, 'duration_secs': 0.240745} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.206385] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1954.206557] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1954.206819] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-488ec892-f369-4b90-9afd-d62a3f2fff55 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.308080] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1954.308341] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1954.308532] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Deleting the datastore file [datastore1] 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1954.309015] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fef430d6-69e3-4993-a9c7-45d193ecb28a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.315711] env[63241]: DEBUG oslo_vmware.api [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for the task: (returnval){ [ 1954.315711] env[63241]: value = "task-1821310" [ 1954.315711] env[63241]: _type = "Task" [ 1954.315711] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.321426] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.324849] env[63241]: DEBUG oslo_vmware.api [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.437336] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1954.437694] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-eceaa39f-520d-4e9e-8394-4c319bb166d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.447324] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1954.447324] env[63241]: value = "task-1821311" [ 1954.447324] env[63241]: _type = "Task" [ 1954.447324] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.456158] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821311, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.587646] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1954.826293] env[63241]: DEBUG oslo_vmware.api [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821310, 'name': DeleteDatastoreFile_Task} progress is 100%. 
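
The inventory dictionary reported for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b above maps onto Placement's capacity model: for each resource class the schedulable capacity is (total - reserved) * allocation_ratio, while min_unit, max_unit and step_size constrain individual allocations rather than the total. Restating the logged figures (no new data, just the arithmetic):

    inventory = {  # values copied from the report above
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
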
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.958727] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821311, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.093387] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1955.093756] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.212s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.094168] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.773s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.094528] env[63241]: DEBUG nova.objects.instance [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'resources' on Instance uuid 779d2380-be6c-4fdb-8755-10e99f8a6fd9 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1955.325658] env[63241]: DEBUG oslo_vmware.api [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Task: {'id': task-1821310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.510963} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.325923] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1955.326124] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1955.326306] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1955.326478] env[63241]: INFO nova.compute.manager [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1955.326716] env[63241]: DEBUG oslo.service.loopingcall [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1955.326910] env[63241]: DEBUG nova.compute.manager [-] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1955.327010] env[63241]: DEBUG nova.network.neutron [-] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1955.459698] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821311, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.616670] env[63241]: DEBUG nova.compute.manager [req-41865f32-44c2-438c-ad39-65dc3c65bbe2 req-53315cdf-54a4-48a7-a649-868e094e4144 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Received event network-vif-deleted-e59b20e5-cfbf-45bb-beb1-675a18f1cb97 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1955.616804] env[63241]: INFO nova.compute.manager [req-41865f32-44c2-438c-ad39-65dc3c65bbe2 req-53315cdf-54a4-48a7-a649-868e094e4144 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Neutron deleted interface e59b20e5-cfbf-45bb-beb1-675a18f1cb97; detaching it from the instance and deleting it from the info cache [ 1955.616974] env[63241]: DEBUG nova.network.neutron [req-41865f32-44c2-438c-ad39-65dc3c65bbe2 req-53315cdf-54a4-48a7-a649-868e094e4144 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.689867] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb25597-5bb8-46f1-92a8-f587adca5c98 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.697589] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7d2b89-75f2-4ef6-aaed-67d5d851aba9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.727437] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae39d7f-332f-4339-b7dd-457911736f65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.734997] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64f85b9-1392-42be-9c56-05f1ca7dab27 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.748612] env[63241]: DEBUG nova.compute.provider_tree [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1955.959491] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821311, 'name': CloneVM_Task, 'duration_secs': 1.213299} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.959804] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Created linked-clone VM from snapshot [ 1955.960507] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dabda74-daef-4fae-a856-c0959de669b4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.968190] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Uploading image 9f42e472-9966-4b9b-a13b-52502ede2ea3 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1955.993492] env[63241]: DEBUG oslo_vmware.rw_handles [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1955.993492] env[63241]: value = "vm-377231" [ 1955.993492] env[63241]: _type = "VirtualMachine" [ 1955.993492] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1955.993886] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-419f1083-40b8-481a-a3b3-dd7929dddc1a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.001750] env[63241]: DEBUG oslo_vmware.rw_handles [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lease: (returnval){ [ 1956.001750] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526af1b7-5fc1-ac3e-f28d-514b91409aed" [ 1956.001750] env[63241]: _type = "HttpNfcLease" [ 1956.001750] env[63241]: } obtained for exporting VM: (result){ [ 1956.001750] env[63241]: value = "vm-377231" [ 1956.001750] env[63241]: _type = "VirtualMachine" [ 1956.001750] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1956.002044] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the lease: (returnval){ [ 1956.002044] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526af1b7-5fc1-ac3e-f28d-514b91409aed" [ 1956.002044] env[63241]: _type = "HttpNfcLease" [ 1956.002044] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1956.008675] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1956.008675] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526af1b7-5fc1-ac3e-f28d-514b91409aed" [ 1956.008675] env[63241]: _type = "HttpNfcLease" [ 1956.008675] env[63241]: } is initializing. 
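
Uploading the linked clone as image 9f42e472-9966-4b9b-a13b-52502ede2ea3 is driven through an export lease: ExportVm returns an HttpNfcLease that must be polled until it leaves the "initializing" state, which is what the repeated "Lease: (returnval){...}" records show. A minimal sketch of that step, reusing the assumed session from earlier and a clone_vm_ref placeholder for the vm-377231 reference:

    # ExportVm hands back an HttpNfcLease managed-object reference for the VM.
    lease = session.invoke_api(session.vim, 'ExportVm', clone_vm_ref)

    # Poll until the lease reports ready (or raise if it errors); this is the
    # loop behind the "is initializing" / "is ready" records.
    session.wait_for_lease_ready(lease)
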
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1956.095759] env[63241]: DEBUG nova.network.neutron [-] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1956.119440] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f11fcab7-19ec-4f7d-9534-7e1d721b8d6a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.128514] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0585e3e5-991a-48c6-8883-f377f6830756 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.157834] env[63241]: DEBUG nova.compute.manager [req-41865f32-44c2-438c-ad39-65dc3c65bbe2 req-53315cdf-54a4-48a7-a649-868e094e4144 service nova] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Detach interface failed, port_id=e59b20e5-cfbf-45bb-beb1-675a18f1cb97, reason: Instance 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1956.278975] env[63241]: DEBUG nova.scheduler.client.report [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 171 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1956.279320] env[63241]: DEBUG nova.compute.provider_tree [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 171 to 172 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1956.279519] env[63241]: DEBUG nova.compute.provider_tree [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1956.510689] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1956.510689] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526af1b7-5fc1-ac3e-f28d-514b91409aed" [ 1956.510689] env[63241]: _type = "HttpNfcLease" [ 1956.510689] 
env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1956.510984] env[63241]: DEBUG oslo_vmware.rw_handles [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1956.510984] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526af1b7-5fc1-ac3e-f28d-514b91409aed" [ 1956.510984] env[63241]: _type = "HttpNfcLease" [ 1956.510984] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1956.511713] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1459ee78-4ba6-4bd7-b1b8-c7794944d522 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.518931] env[63241]: DEBUG oslo_vmware.rw_handles [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52357ee6-0b8c-8796-d513-baeec4f97ed9/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1956.519116] env[63241]: DEBUG oslo_vmware.rw_handles [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52357ee6-0b8c-8796-d513-baeec4f97ed9/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1956.598606] env[63241]: INFO nova.compute.manager [-] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Took 1.27 seconds to deallocate network for instance. 
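
Once the lease is ready, its info property carries the device URLs the disk can be streamed from, which is where the "Found VMDK URL ... disk-0.vmdk" record comes from; while the transfer runs the client also has to keep reporting progress on the lease so it does not time out, as the HttpNfcLeaseProgress call just below does. A short sketch of those two pieces, continuing the same assumptions (the 50 is an arbitrary example value):

    from oslo_vmware import vim_util

    # Read HttpNfcLease.info; deviceUrl is a list whose entries carry a .url
    # from which the corresponding disk can be fetched over HTTPS.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_urls = [dev.url for dev in lease_info.deviceUrl]

    # Periodically report progress (0-100) to keep the lease alive during the read.
    session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease, percent=50)
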
[ 1956.606212] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9f235c40-f5f0-48a2-b198-d35d55b7ba8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.784153] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.802292] env[63241]: INFO nova.scheduler.client.report [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted allocations for instance 779d2380-be6c-4fdb-8755-10e99f8a6fd9 [ 1957.107347] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.107686] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.108075] env[63241]: DEBUG nova.objects.instance [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lazy-loading 'resources' on Instance uuid 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1957.311029] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bbde1132-a58a-45d1-96b6-4c7718d700d6 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "779d2380-be6c-4fdb-8755-10e99f8a6fd9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.473s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.668899] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1957.668899] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377229', 'volume_id': 'dd7c0b60-1dcf-4962-be80-f4c646442775', 'name': 'volume-dd7c0b60-1dcf-4962-be80-f4c646442775', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '26b75825-49c4-4870-957a-a2a76a970880', 'attached_at': '', 'detached_at': '', 'volume_id': 'dd7c0b60-1dcf-4962-be80-f4c646442775', 'serial': 'dd7c0b60-1dcf-4962-be80-f4c646442775'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1957.669096] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73968e6c-8caf-4ed6-81be-bfed6ffac58b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.695098] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caeef3ae-eb62-4c81-9b50-d31c232f1248 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.721061] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] volume-dd7c0b60-1dcf-4962-be80-f4c646442775/volume-dd7c0b60-1dcf-4962-be80-f4c646442775.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1957.724040] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c25d03c9-db5f-4d18-ae13-9fd5970eb5b3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.744125] env[63241]: DEBUG oslo_vmware.api [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1957.744125] env[63241]: value = "task-1821314" [ 1957.744125] env[63241]: _type = "Task" [ 1957.744125] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.748625] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bb458b-62d4-4e35-8a05-865b8cf2f720 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.754486] env[63241]: DEBUG oslo_vmware.api [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821314, 'name': ReconfigVM_Task} progress is 5%. 
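
The "Reconfiguring VM instance instance-0000006e to attach disk ... with type thin" step is a ReconfigVM_Task whose config spec adds a VirtualDisk device backed by the existing volume VMDK. A condensed sketch of how such a spec can be assembled with the suds client factory exposed by the session; the controller key and unit number are placeholders, and fields a complete spec would also carry (capacityInKB, device key, etc.) are left out for brevity:

    cf = session.vim.client.factory

    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = ('[datastore1] volume-dd7c0b60-1dcf-4962-be80-f4c646442775/'
                        'volume-dd7c0b60-1dcf-4962-be80-f4c646442775.vmdk')
    backing.diskMode = 'persistent'
    backing.thinProvisioned = True            # "type thin" in the record above

    disk = cf.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = 1000                 # placeholder: existing SCSI controller
    disk.unitNumber = 1                       # placeholder: free unit on that controller

    change = cf.create('ns0:VirtualDeviceConfigSpec')
    change.operation = 'add'
    change.device = disk

    config_spec = cf.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)
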
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.759065] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084345b9-6c73-4f1f-8940-b161e360163f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.789788] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d65f92-338f-4f8e-a5fb-ad3a45e3746a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.798571] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b202788-023a-4112-b923-73e171391f2b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.816069] env[63241]: DEBUG nova.compute.provider_tree [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1957.914207] env[63241]: INFO nova.compute.manager [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Rebuilding instance [ 1957.955253] env[63241]: DEBUG nova.compute.manager [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1957.956677] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046da860-ade4-44c7-a706-026d22f2282f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.253832] env[63241]: DEBUG oslo_vmware.api [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821314, 'name': ReconfigVM_Task, 'duration_secs': 0.351787} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.254228] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Reconfigured VM instance instance-0000006e to attach disk [datastore1] volume-dd7c0b60-1dcf-4962-be80-f4c646442775/volume-dd7c0b60-1dcf-4962-be80-f4c646442775.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1958.259553] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b0c4e1e-ad25-4648-a559-07045269d40f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.275677] env[63241]: DEBUG oslo_vmware.api [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1958.275677] env[63241]: value = "task-1821315" [ 1958.275677] env[63241]: _type = "Task" [ 1958.275677] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.284339] env[63241]: DEBUG oslo_vmware.api [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821315, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.319616] env[63241]: DEBUG nova.scheduler.client.report [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1958.468032] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1958.468521] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4757e5b5-561f-447d-938c-b9a17f00a71f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.475431] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1958.475431] env[63241]: value = "task-1821316" [ 1958.475431] env[63241]: _type = "Task" [ 1958.475431] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.484575] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.771123] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.771464] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.785784] env[63241]: DEBUG oslo_vmware.api [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821315, 'name': ReconfigVM_Task, 'duration_secs': 0.15278} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.786217] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377229', 'volume_id': 'dd7c0b60-1dcf-4962-be80-f4c646442775', 'name': 'volume-dd7c0b60-1dcf-4962-be80-f4c646442775', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '26b75825-49c4-4870-957a-a2a76a970880', 'attached_at': '', 'detached_at': '', 'volume_id': 'dd7c0b60-1dcf-4962-be80-f4c646442775', 'serial': 'dd7c0b60-1dcf-4962-be80-f4c646442775'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1958.824647] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.717s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.848131] env[63241]: INFO nova.scheduler.client.report [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Deleted allocations for instance 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0 [ 1958.985929] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 
tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821316, 'name': PowerOffVM_Task, 'duration_secs': 0.185382} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.986233] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1958.986456] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1958.987264] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75557fcf-c709-4247-8778-a4d445e8195d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.994240] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1958.994477] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7da0e4d2-9cfb-45f3-9728-9a2d5306963d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.072472] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1959.072745] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1959.072976] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleting the datastore file [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1959.073278] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05b96cac-012b-4821-8ac9-19388657602c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.080069] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1959.080069] env[63241]: value = "task-1821318" [ 1959.080069] env[63241]: _type 
= "Task" [ 1959.080069] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.087902] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821318, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.274425] env[63241]: DEBUG nova.compute.manager [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1959.355958] env[63241]: DEBUG oslo_concurrency.lockutils [None req-18193960-3ff0-4f9f-b7da-3136369e63b9 tempest-ServerRescueTestJSON-595460963 tempest-ServerRescueTestJSON-595460963-project-member] Lock "77c501b6-9ef7-4ad9-9013-7bf6b773f2e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.683s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.590045] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821318, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141988} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.590378] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1959.590570] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1959.590746] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1959.793207] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.793482] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.795583] env[63241]: INFO nova.compute.claims [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1959.824106] env[63241]: DEBUG nova.objects.instance [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'flavor' on Instance uuid 26b75825-49c4-4870-957a-a2a76a970880 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1960.329557] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a6535f74-13fb-4044-8289-7d3863d99cfe tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.265s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.562826] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "26b75825-49c4-4870-957a-a2a76a970880" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.563167] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.625305] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1960.625555] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1960.625714] 
env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1960.625895] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1960.626057] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1960.626249] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1960.626459] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1960.626624] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1960.626794] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1960.626960] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1960.627150] env[63241]: DEBUG nova.virt.hardware [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1960.628051] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6916e2f5-7ad1-40d7-8f98-ee6b3ef3b111 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.636501] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a203a9d6-0d66-44ce-9801-210518692534 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.654031] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:b4:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbe70abb-a696-4530-893c-079aa3168dc7', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1960.662808] env[63241]: DEBUG oslo.service.loopingcall [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1960.663131] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1960.663361] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cdc8211c-f5a6-4b0b-a193-62fe1f69c14a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.684412] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1960.684412] env[63241]: value = "task-1821319" [ 1960.684412] env[63241]: _type = "Task" [ 1960.684412] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.692483] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821319, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.924185] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5271371d-a327-4f08-996c-afaf4a811202 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.935464] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cfaceb-582f-4459-802b-2f140707c94a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.966040] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f9f205-6f10-481d-8e90-9c63d226c28a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.975980] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54937a6d-2d5e-4889-81c7-f8792c3b3dfc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.991730] env[63241]: DEBUG nova.compute.provider_tree [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1961.066786] env[63241]: INFO nova.compute.manager [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Detaching volume dd7c0b60-1dcf-4962-be80-f4c646442775 [ 1961.114627] env[63241]: INFO nova.virt.block_device [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Attempting to driver detach volume dd7c0b60-1dcf-4962-be80-f4c646442775 from mountpoint /dev/sdb [ 1961.114627] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1961.114627] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377229', 'volume_id': 'dd7c0b60-1dcf-4962-be80-f4c646442775', 'name': 'volume-dd7c0b60-1dcf-4962-be80-f4c646442775', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '26b75825-49c4-4870-957a-a2a76a970880', 'attached_at': '', 'detached_at': '', 'volume_id': 'dd7c0b60-1dcf-4962-be80-f4c646442775', 'serial': 'dd7c0b60-1dcf-4962-be80-f4c646442775'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1961.115810] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c1b7e1-316b-4018-b1d0-49cab539940a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.138040] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07649404-b8ef-412f-b561-be975113adb1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.145763] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85d261b-0b20-4f4e-bb30-6103602924c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.170854] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a8ebf9-c5cb-4f44-beab-f3ed835c0acd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.186839] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] The volume has not been displaced from its original location: [datastore1] volume-dd7c0b60-1dcf-4962-be80-f4c646442775/volume-dd7c0b60-1dcf-4962-be80-f4c646442775.vmdk. No consolidation needed. {{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1961.193340] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Reconfiguring VM instance instance-0000006e to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1961.196386] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfda0766-e4fc-4f6f-9b2f-4d56d446f363 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.215261] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821319, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.216770] env[63241]: DEBUG oslo_vmware.api [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1961.216770] env[63241]: value = "task-1821320" [ 1961.216770] env[63241]: _type = "Task" [ 1961.216770] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.226008] env[63241]: DEBUG oslo_vmware.api [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821320, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.494421] env[63241]: DEBUG nova.scheduler.client.report [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1961.696793] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821319, 'name': CreateVM_Task, 'duration_secs': 0.597927} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.697169] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1961.705572] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.705753] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1961.706100] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1961.706394] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75a57f96-f5c7-4d7c-857a-a515376a07ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.712126] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1961.712126] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ee8f9b-28b3-b943-f199-0b484aa26e0b" [ 1961.712126] env[63241]: _type = "Task" [ 1961.712126] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.720285] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ee8f9b-28b3-b943-f199-0b484aa26e0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.728079] env[63241]: DEBUG oslo_vmware.api [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821320, 'name': ReconfigVM_Task, 'duration_secs': 0.255897} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.728399] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Reconfigured VM instance instance-0000006e to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1961.733687] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7794247d-b011-492f-8e82-4bc93ca97a50 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.748869] env[63241]: DEBUG oslo_vmware.api [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1961.748869] env[63241]: value = "task-1821321" [ 1961.748869] env[63241]: _type = "Task" [ 1961.748869] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.756894] env[63241]: DEBUG oslo_vmware.api [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821321, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.999666] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.206s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.000307] env[63241]: DEBUG nova.compute.manager [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1962.222934] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ee8f9b-28b3-b943-f199-0b484aa26e0b, 'name': SearchDatastore_Task, 'duration_secs': 0.010018} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.223278] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.223511] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1962.223746] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1962.223930] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1962.224125] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1962.224444] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18488d34-f4e4-4295-90bd-837fefd54592 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.233567] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1962.233773] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1962.234608] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bb79c38-a04a-445e-865d-4bc6d5009869 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.239887] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1962.239887] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52be40b0-555e-701e-e830-0bc2eaca0533" [ 1962.239887] env[63241]: _type = "Task" [ 1962.239887] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.247813] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52be40b0-555e-701e-e830-0bc2eaca0533, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.257377] env[63241]: DEBUG oslo_vmware.api [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821321, 'name': ReconfigVM_Task, 'duration_secs': 0.14146} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.257648] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377229', 'volume_id': 'dd7c0b60-1dcf-4962-be80-f4c646442775', 'name': 'volume-dd7c0b60-1dcf-4962-be80-f4c646442775', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '26b75825-49c4-4870-957a-a2a76a970880', 'attached_at': '', 'detached_at': '', 'volume_id': 'dd7c0b60-1dcf-4962-be80-f4c646442775', 'serial': 'dd7c0b60-1dcf-4962-be80-f4c646442775'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1962.506042] env[63241]: DEBUG nova.compute.utils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1962.507530] env[63241]: DEBUG nova.compute.manager [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1962.507696] env[63241]: DEBUG nova.network.neutron [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1962.547749] env[63241]: DEBUG nova.policy [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de6df2e8caaa4c0c82c94f9d107a8e17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6964b0dd75c4704b8f5cacd2c8e355f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1962.750499] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52be40b0-555e-701e-e830-0bc2eaca0533, 'name': SearchDatastore_Task, 'duration_secs': 0.013186} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.751373] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d135b300-a525-4202-9475-f954041b14f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.756574] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1962.756574] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5205d07e-9057-b407-73ef-f01f043db91b" [ 1962.756574] env[63241]: _type = "Task" [ 1962.756574] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.764565] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5205d07e-9057-b407-73ef-f01f043db91b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.807227] env[63241]: DEBUG nova.objects.instance [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'flavor' on Instance uuid 26b75825-49c4-4870-957a-a2a76a970880 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1962.866982] env[63241]: DEBUG nova.network.neutron [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Successfully created port: 4d328bf7-9485-42e2-9ad2-0b00ac726b9b {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1963.010917] env[63241]: DEBUG nova.compute.manager [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1963.267111] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5205d07e-9057-b407-73ef-f01f043db91b, 'name': SearchDatastore_Task, 'duration_secs': 0.00917} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.267400] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1963.267582] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7/215f658f-2af6-4525-b94c-489ad794e6f7.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1963.267866] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-858b20d3-dcb1-4032-b787-8fe7a95c6e2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.275268] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1963.275268] env[63241]: value = "task-1821322" [ 1963.275268] env[63241]: _type = "Task" [ 1963.275268] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.282969] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.293637] env[63241]: DEBUG oslo_vmware.rw_handles [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52357ee6-0b8c-8796-d513-baeec4f97ed9/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1963.294538] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0b1e2a-7c7b-4409-bdb6-b1458e9e4fd4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.300103] env[63241]: DEBUG oslo_vmware.rw_handles [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52357ee6-0b8c-8796-d513-baeec4f97ed9/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1963.300279] env[63241]: ERROR oslo_vmware.rw_handles [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52357ee6-0b8c-8796-d513-baeec4f97ed9/disk-0.vmdk due to incomplete transfer. [ 1963.300492] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f5b0bc63-26c3-43d4-9f12-36ae30bfc740 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.308560] env[63241]: DEBUG oslo_vmware.rw_handles [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52357ee6-0b8c-8796-d513-baeec4f97ed9/disk-0.vmdk. 
{{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1963.308757] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Uploaded image 9f42e472-9966-4b9b-a13b-52502ede2ea3 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1963.310987] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1963.312966] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-60addb9d-13db-4a41-932f-54bb528cac42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.319455] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1963.319455] env[63241]: value = "task-1821323" [ 1963.319455] env[63241]: _type = "Task" [ 1963.319455] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.327381] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821323, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.785416] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.416822} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.785717] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7/215f658f-2af6-4525-b94c-489ad794e6f7.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1963.785915] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1963.786193] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de16b3d0-e630-4b37-9f5c-3b68129ed570 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.792183] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1963.792183] env[63241]: value = "task-1821324" [ 1963.792183] env[63241]: _type = "Task" [ 1963.792183] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.799919] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821324, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.817049] env[63241]: DEBUG oslo_concurrency.lockutils [None req-d5b974d2-569a-434c-b58d-1ab5532c7d09 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.254s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.829088] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821323, 'name': Destroy_Task, 'duration_secs': 0.414577} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.829229] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Destroyed the VM [ 1963.829520] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1963.829800] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fc36062b-d0b5-4c69-bb5f-c7eb0bcb79fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.837144] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1963.837144] env[63241]: value = "task-1821325" [ 1963.837144] env[63241]: _type = "Task" [ 1963.837144] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.845564] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821325, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.020723] env[63241]: DEBUG nova.compute.manager [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1964.042687] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1964.042990] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1964.043198] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1964.043399] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1964.043559] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1964.043720] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1964.043946] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1964.044177] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1964.044370] env[63241]: DEBUG 
nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1964.044588] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1964.044795] env[63241]: DEBUG nova.virt.hardware [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1964.045708] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90673683-157b-4557-9067-3593e290e745 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.055237] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609fc760-110f-4202-9448-b77397e7b743 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.302087] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821324, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061954} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.302425] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1964.303217] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6a5f69-b2d6-4b2f-a4d6-fd48b001fe36 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.325231] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7/215f658f-2af6-4525-b94c-489ad794e6f7.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1964.325722] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c879d662-c26f-4669-8492-5e94abe5615c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.348727] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821325, 'name': RemoveSnapshot_Task, 'duration_secs': 0.332474} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.349914] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1964.350245] env[63241]: DEBUG nova.compute.manager [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1964.350964] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1964.350964] env[63241]: value = "task-1821326" [ 1964.350964] env[63241]: _type = "Task" [ 1964.350964] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.351682] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c31a188-f09e-453b-8592-6aec511fa2f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.368030] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821326, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.831691] env[63241]: DEBUG nova.compute.manager [req-5ff6b26d-0192-409c-85f6-f2ad4b6ca166 req-46e71d7e-0a64-48be-925c-387fc3cdbc3d service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Received event network-vif-plugged-4d328bf7-9485-42e2-9ad2-0b00ac726b9b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1964.832153] env[63241]: DEBUG oslo_concurrency.lockutils [req-5ff6b26d-0192-409c-85f6-f2ad4b6ca166 req-46e71d7e-0a64-48be-925c-387fc3cdbc3d service nova] Acquiring lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.832580] env[63241]: DEBUG oslo_concurrency.lockutils [req-5ff6b26d-0192-409c-85f6-f2ad4b6ca166 req-46e71d7e-0a64-48be-925c-387fc3cdbc3d service nova] Lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.832666] env[63241]: DEBUG oslo_concurrency.lockutils [req-5ff6b26d-0192-409c-85f6-f2ad4b6ca166 req-46e71d7e-0a64-48be-925c-387fc3cdbc3d service nova] Lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.832923] env[63241]: DEBUG nova.compute.manager [req-5ff6b26d-0192-409c-85f6-f2ad4b6ca166 req-46e71d7e-0a64-48be-925c-387fc3cdbc3d service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] No waiting events found dispatching network-vif-plugged-4d328bf7-9485-42e2-9ad2-0b00ac726b9b {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1964.833185] env[63241]: WARNING nova.compute.manager [req-5ff6b26d-0192-409c-85f6-f2ad4b6ca166 req-46e71d7e-0a64-48be-925c-387fc3cdbc3d service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Received unexpected event network-vif-plugged-4d328bf7-9485-42e2-9ad2-0b00ac726b9b for instance with vm_state building and task_state spawning. [ 1964.863816] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821326, 'name': ReconfigVM_Task, 'duration_secs': 0.362017} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.864103] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7/215f658f-2af6-4525-b94c-489ad794e6f7.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1964.864738] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfdcafeb-cfeb-4b76-9a62-387ee2029e9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.871872] env[63241]: DEBUG oslo_concurrency.lockutils [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "26b75825-49c4-4870-957a-a2a76a970880" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.872102] env[63241]: DEBUG oslo_concurrency.lockutils [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.872306] env[63241]: DEBUG oslo_concurrency.lockutils [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "26b75825-49c4-4870-957a-a2a76a970880-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.872487] env[63241]: DEBUG oslo_concurrency.lockutils [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.872652] env[63241]: DEBUG oslo_concurrency.lockutils [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.875041] env[63241]: INFO nova.compute.manager [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Shelve offloading [ 1964.876241] env[63241]: DEBUG oslo_vmware.api [None 
req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1964.876241] env[63241]: value = "task-1821327" [ 1964.876241] env[63241]: _type = "Task" [ 1964.876241] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.876644] env[63241]: INFO nova.compute.manager [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Terminating instance [ 1964.877868] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1964.878139] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e557bd72-d259-4c3c-a35e-fceb41148d7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.882551] env[63241]: DEBUG nova.compute.manager [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1964.882743] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1964.883730] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbd02af-a9c4-40df-8d7f-26a6049c8d3d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.887611] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1964.887611] env[63241]: value = "task-1821328" [ 1964.887611] env[63241]: _type = "Task" [ 1964.887611] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.894925] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1964.895210] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821327, 'name': Rename_Task} progress is 14%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.895706] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7b5afc3-225d-4c2d-98cd-a650ac351910 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.902731] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1964.902942] env[63241]: DEBUG nova.compute.manager [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1964.903287] env[63241]: DEBUG oslo_vmware.api [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1964.903287] env[63241]: value = "task-1821329" [ 1964.903287] env[63241]: _type = "Task" [ 1964.903287] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.904032] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00a317c-854a-4841-8d47-4503d92e6b74 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.914554] env[63241]: DEBUG oslo_concurrency.lockutils [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.914862] env[63241]: DEBUG oslo_concurrency.lockutils [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.915156] env[63241]: DEBUG nova.network.neutron [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1964.919613] env[63241]: DEBUG oslo_vmware.api [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821329, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.920554] env[63241]: DEBUG nova.network.neutron [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Successfully updated port: 4d328bf7-9485-42e2-9ad2-0b00ac726b9b {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1965.388159] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821327, 'name': Rename_Task, 'duration_secs': 0.145443} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.388454] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1965.388700] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ce2becb-ef69-4fc5-8302-4e2e45489b7a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.397159] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 1965.397159] env[63241]: value = "task-1821330" [ 1965.397159] env[63241]: _type = "Task" [ 1965.397159] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.413554] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.418345] env[63241]: DEBUG oslo_vmware.api [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821329, 'name': PowerOffVM_Task, 'duration_secs': 0.194605} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.418578] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1965.418747] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1965.418980] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88f4782c-855e-4194-86d6-3b279363030d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.424402] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "refresh_cache-43684f7f-0a5d-48e5-8ab6-573db8d81ff0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1965.424402] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "refresh_cache-43684f7f-0a5d-48e5-8ab6-573db8d81ff0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.424402] env[63241]: DEBUG nova.network.neutron [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1965.551434] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1965.551748] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1965.551875] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleting the datastore file [datastore1] 26b75825-49c4-4870-957a-a2a76a970880 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1965.552290] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bea3ceb7-0065-4c6e-a2e4-856fb0533e0c {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.559520] env[63241]: DEBUG oslo_vmware.api [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1965.559520] env[63241]: value = "task-1821332" [ 1965.559520] env[63241]: _type = "Task" [ 1965.559520] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.569860] env[63241]: DEBUG oslo_vmware.api [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821332, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.716640] env[63241]: DEBUG nova.network.neutron [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating instance_info_cache with network_info: [{"id": "249e56d5-0dc5-4bab-9179-ca69f7024104", "address": "fa:16:3e:85:eb:39", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249e56d5-0d", "ovs_interfaceid": "249e56d5-0dc5-4bab-9179-ca69f7024104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.907312] env[63241]: DEBUG oslo_vmware.api [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821330, 'name': PowerOnVM_Task, 'duration_secs': 0.450093} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.907691] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1965.907775] env[63241]: DEBUG nova.compute.manager [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1965.908543] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5bf116-ffce-4c8d-b7ef-79bd01b53855 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.954784] env[63241]: DEBUG nova.network.neutron [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1966.069923] env[63241]: DEBUG oslo_vmware.api [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131175} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.070211] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1966.070408] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1966.070594] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1966.070799] env[63241]: INFO nova.compute.manager [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 1966.071040] env[63241]: DEBUG oslo.service.loopingcall [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.071250] env[63241]: DEBUG nova.compute.manager [-] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1966.071345] env[63241]: DEBUG nova.network.neutron [-] [instance: 26b75825-49c4-4870-957a-a2a76a970880] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1966.093350] env[63241]: DEBUG nova.network.neutron [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Updating instance_info_cache with network_info: [{"id": "4d328bf7-9485-42e2-9ad2-0b00ac726b9b", "address": "fa:16:3e:fe:98:7c", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d328bf7-94", "ovs_interfaceid": "4d328bf7-9485-42e2-9ad2-0b00ac726b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.219499] env[63241]: DEBUG oslo_concurrency.lockutils [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.426321] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.426739] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by
"nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.427067] env[63241]: DEBUG nova.objects.instance [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63241) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1966.595825] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "refresh_cache-43684f7f-0a5d-48e5-8ab6-573db8d81ff0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.596197] env[63241]: DEBUG nova.compute.manager [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Instance network_info: |[{"id": "4d328bf7-9485-42e2-9ad2-0b00ac726b9b", "address": "fa:16:3e:fe:98:7c", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d328bf7-94", "ovs_interfaceid": "4d328bf7-9485-42e2-9ad2-0b00ac726b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1966.596641] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:98:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d328bf7-9485-42e2-9ad2-0b00ac726b9b', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1966.604373] env[63241]: DEBUG oslo.service.loopingcall [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.604589] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1966.604812] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ca2ad77-e88f-4316-ad5c-e29a960f9a66 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.624311] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1966.624311] env[63241]: value = "task-1821333" [ 1966.624311] env[63241]: _type = "Task" [ 1966.624311] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.631569] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821333, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.915337] env[63241]: DEBUG nova.compute.manager [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Received event network-changed-4d328bf7-9485-42e2-9ad2-0b00ac726b9b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1966.915573] env[63241]: DEBUG nova.compute.manager [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Refreshing instance network info cache due to event network-changed-4d328bf7-9485-42e2-9ad2-0b00ac726b9b. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1966.918301] env[63241]: DEBUG oslo_concurrency.lockutils [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] Acquiring lock "refresh_cache-43684f7f-0a5d-48e5-8ab6-573db8d81ff0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1966.918301] env[63241]: DEBUG oslo_concurrency.lockutils [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] Acquired lock "refresh_cache-43684f7f-0a5d-48e5-8ab6-573db8d81ff0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.918301] env[63241]: DEBUG nova.network.neutron [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Refreshing network info cache for port 4d328bf7-9485-42e2-9ad2-0b00ac726b9b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1967.075888] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1967.076961] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3bd567-2fb2-45b2-a562-ab01c4426360 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.085152] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1967.085417] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb2f181b-f0c2-44c6-b057-93fccad370fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.136243] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821333, 'name': CreateVM_Task, 'duration_secs': 0.475252} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.136950] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1967.137689] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1967.137859] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.138199] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1967.138675] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6c5b12c-f252-4163-a3bf-c6686e6a8853 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.143264] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1967.143264] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a33332-73cc-3a8a-67e6-4492e235621c" [ 1967.143264] env[63241]: _type = "Task" [ 1967.143264] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.151219] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a33332-73cc-3a8a-67e6-4492e235621c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.378649] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1967.378847] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1967.379025] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleting the datastore file [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1967.379291] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-946de4e6-c115-4b89-8512-2283be96c7bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.385761] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1967.385761] env[63241]: value = "task-1821335" [ 1967.385761] env[63241]: _type = "Task" [ 1967.385761] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.393584] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.435242] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b5981604-7477-4121-bb9c-f7b3a891c4e8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.606855] env[63241]: DEBUG nova.network.neutron [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Updated VIF entry in instance network info cache for port 4d328bf7-9485-42e2-9ad2-0b00ac726b9b. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1967.607250] env[63241]: DEBUG nova.network.neutron [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Updating instance_info_cache with network_info: [{"id": "4d328bf7-9485-42e2-9ad2-0b00ac726b9b", "address": "fa:16:3e:fe:98:7c", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d328bf7-94", "ovs_interfaceid": "4d328bf7-9485-42e2-9ad2-0b00ac726b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.631877] env[63241]: DEBUG nova.network.neutron [-] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.654808] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a33332-73cc-3a8a-67e6-4492e235621c, 'name': SearchDatastore_Task, 'duration_secs': 0.009242} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.655114] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1967.655348] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1967.655577] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1967.655724] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.655907] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1967.656172] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-295b6afe-f688-40ef-bf04-ff48fbc32b07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.664034] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1967.664218] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1967.664898] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce6c26e2-ae22-45ba-b122-ed02547913ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.670063] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1967.670063] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52572abf-2d47-6725-9623-ffaf9645ed91" [ 1967.670063] env[63241]: _type = "Task" [ 1967.670063] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.677351] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52572abf-2d47-6725-9623-ffaf9645ed91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.896072] env[63241]: DEBUG oslo_vmware.api [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131515} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.896344] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1967.896616] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1967.896820] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1967.919412] env[63241]: INFO nova.scheduler.client.report [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted allocations for instance e4514260-dfcc-45a3-80d5-b5484b0b599c [ 1968.109470] env[63241]: DEBUG oslo_concurrency.lockutils [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] Releasing lock "refresh_cache-43684f7f-0a5d-48e5-8ab6-573db8d81ff0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.109749] env[63241]: DEBUG nova.compute.manager [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a 
req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received event network-vif-unplugged-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1968.109951] env[63241]: DEBUG oslo_concurrency.lockutils [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] Acquiring lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.110176] env[63241]: DEBUG oslo_concurrency.lockutils [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.110343] env[63241]: DEBUG oslo_concurrency.lockutils [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.110515] env[63241]: DEBUG nova.compute.manager [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] No waiting events found dispatching network-vif-unplugged-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1968.110689] env[63241]: WARNING nova.compute.manager [req-791a6148-fa6f-466a-aa8e-d4a18dd74c4a req-daef7593-4810-4833-955b-1e1ea3eaa2f6 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received unexpected event network-vif-unplugged-249e56d5-0dc5-4bab-9179-ca69f7024104 for instance with vm_state shelved and task_state shelving_offloading. [ 1968.133535] env[63241]: INFO nova.compute.manager [-] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Took 2.06 seconds to deallocate network for instance. [ 1968.183874] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52572abf-2d47-6725-9623-ffaf9645ed91, 'name': SearchDatastore_Task, 'duration_secs': 0.007852} completed successfully.
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.183874] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ef9452e-dae6-4367-86a8-695d41540a99 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.189175] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1968.189175] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522e1b8e-1132-0883-65f4-ef4b7c5584c9" [ 1968.189175] env[63241]: _type = "Task" [ 1968.189175] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.197899] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522e1b8e-1132-0883-65f4-ef4b7c5584c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.423963] env[63241]: DEBUG oslo_concurrency.lockutils [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.424275] env[63241]: DEBUG oslo_concurrency.lockutils [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.424512] env[63241]: DEBUG nova.objects.instance [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'resources' on Instance uuid e4514260-dfcc-45a3-80d5-b5484b0b599c {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1968.639651] env[63241]: DEBUG oslo_concurrency.lockutils [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.700301] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522e1b8e-1132-0883-65f4-ef4b7c5584c9, 'name': SearchDatastore_Task, 'duration_secs': 0.009021} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.700513] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.700754] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 43684f7f-0a5d-48e5-8ab6-573db8d81ff0/43684f7f-0a5d-48e5-8ab6-573db8d81ff0.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1968.701012] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1506a5e5-86a2-4fad-96f2-ad551adfc395 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.708552] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1968.708552] env[63241]: value = "task-1821336" [ 1968.708552] env[63241]: _type = "Task" [ 1968.708552] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.716099] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821336, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.927512] env[63241]: DEBUG nova.objects.instance [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'numa_topology' on Instance uuid e4514260-dfcc-45a3-80d5-b5484b0b599c {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1968.950266] env[63241]: DEBUG nova.compute.manager [req-7b8e6d44-5485-4b81-ad3d-4e43491a00c1 req-15e9df84-18ab-4a8c-b603-c2af0a807f1b service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received event network-changed-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1968.950415] env[63241]: DEBUG nova.compute.manager [req-7b8e6d44-5485-4b81-ad3d-4e43491a00c1 req-15e9df84-18ab-4a8c-b603-c2af0a807f1b service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Refreshing instance network info cache due to event network-changed-249e56d5-0dc5-4bab-9179-ca69f7024104. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1968.950707] env[63241]: DEBUG oslo_concurrency.lockutils [req-7b8e6d44-5485-4b81-ad3d-4e43491a00c1 req-15e9df84-18ab-4a8c-b603-c2af0a807f1b service nova] Acquiring lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.950866] env[63241]: DEBUG oslo_concurrency.lockutils [req-7b8e6d44-5485-4b81-ad3d-4e43491a00c1 req-15e9df84-18ab-4a8c-b603-c2af0a807f1b service nova] Acquired lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.951254] env[63241]: DEBUG nova.network.neutron [req-7b8e6d44-5485-4b81-ad3d-4e43491a00c1 req-15e9df84-18ab-4a8c-b603-c2af0a807f1b service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Refreshing network info cache for port 249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1969.219712] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821336, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.399899} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.219979] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 43684f7f-0a5d-48e5-8ab6-573db8d81ff0/43684f7f-0a5d-48e5-8ab6-573db8d81ff0.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1969.220219] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1969.220557] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-645105b5-9751-456c-a9da-bd2f7354ce89 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.227306] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1969.227306] env[63241]: value = "task-1821337" [ 1969.227306] env[63241]: _type = "Task" [ 1969.227306] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.235629] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821337, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.430648] env[63241]: DEBUG nova.objects.base [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1969.496702] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba0d11a-5261-4eca-a741-07d7f7d8911e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.504727] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c75c8a5-5148-4940-bd8f-4b870637feae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.537673] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec12b98-8de7-4aaa-a768-77bc56de9b47 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.545218] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a8467d-b368-42fd-b0f9-30de01966570 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.558287] env[63241]: DEBUG nova.compute.provider_tree [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1969.678572] env[63241]: DEBUG nova.network.neutron [req-7b8e6d44-5485-4b81-ad3d-4e43491a00c1 req-15e9df84-18ab-4a8c-b603-c2af0a807f1b service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updated VIF entry in instance network info cache for port 249e56d5-0dc5-4bab-9179-ca69f7024104. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1969.678937] env[63241]: DEBUG nova.network.neutron [req-7b8e6d44-5485-4b81-ad3d-4e43491a00c1 req-15e9df84-18ab-4a8c-b603-c2af0a807f1b service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating instance_info_cache with network_info: [{"id": "249e56d5-0dc5-4bab-9179-ca69f7024104", "address": "fa:16:3e:85:eb:39", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap249e56d5-0d", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.736475] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821337, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060082} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.736698] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1969.737474] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdf8599-9b36-4c82-89f2-d88c84477b5b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.758984] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 43684f7f-0a5d-48e5-8ab6-573db8d81ff0/43684f7f-0a5d-48e5-8ab6-573db8d81ff0.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1969.759451] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4495656-3fc2-4f35-a0cc-df7388ed359c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.779101] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1969.779101] 
env[63241]: value = "task-1821338" [ 1969.779101] env[63241]: _type = "Task" [ 1969.779101] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.786748] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821338, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.061871] env[63241]: DEBUG nova.scheduler.client.report [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1970.181910] env[63241]: DEBUG oslo_concurrency.lockutils [req-7b8e6d44-5485-4b81-ad3d-4e43491a00c1 req-15e9df84-18ab-4a8c-b603-c2af0a807f1b service nova] Releasing lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.182385] env[63241]: DEBUG nova.compute.manager [req-7b8e6d44-5485-4b81-ad3d-4e43491a00c1 req-15e9df84-18ab-4a8c-b603-c2af0a807f1b service nova] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Received event network-vif-deleted-139ab667-6231-4030-a733-172ac1488ddf {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1970.289087] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821338, 'name': ReconfigVM_Task, 'duration_secs': 0.290525} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.289338] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 43684f7f-0a5d-48e5-8ab6-573db8d81ff0/43684f7f-0a5d-48e5-8ab6-573db8d81ff0.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1970.289962] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4f944c7-b253-4de6-baac-7d952a39e42e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.296893] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1970.296893] env[63241]: value = "task-1821339" [ 1970.296893] env[63241]: _type = "Task" [ 1970.296893] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.305870] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821339, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.567378] env[63241]: DEBUG oslo_concurrency.lockutils [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.570989] env[63241]: DEBUG oslo_concurrency.lockutils [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.931s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1970.571242] env[63241]: DEBUG nova.objects.instance [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'resources' on Instance uuid 26b75825-49c4-4870-957a-a2a76a970880 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1970.808838] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821339, 'name': Rename_Task, 'duration_secs': 0.148139} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.809128] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1970.809368] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-369e7d7f-25f7-425f-9b0e-7e01bb4ecb92 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.814985] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 1970.814985] env[63241]: value = "task-1821340" [ 1970.814985] env[63241]: _type = "Task" [ 1970.814985] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.823046] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821340, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.080746] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.082447] env[63241]: DEBUG oslo_concurrency.lockutils [None req-492160f9-a741-4ed6-91be-29b56db5a802 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 19.817s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.083417] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.003s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.083661] env[63241]: INFO nova.compute.manager [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Unshelving [ 1971.168709] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2b4ad4-3114-436b-8794-acbdd1214d8e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.176750] env[63241]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22ee351-836c-44c2-bcc3-7f0fb69fda46 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.207532] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c09fac-0b98-40a7-8e1a-680897548505 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.215032] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ec4402-8571-46ac-8fdd-aca38509940b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.228340] env[63241]: DEBUG nova.compute.provider_tree [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1971.325163] env[63241]: DEBUG oslo_vmware.api [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821340, 'name': PowerOnVM_Task, 'duration_secs': 0.423616} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.325494] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1971.325706] env[63241]: INFO nova.compute.manager [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Took 7.30 seconds to spawn the instance on the hypervisor. 
[ 1971.325890] env[63241]: DEBUG nova.compute.manager [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1971.326671] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a8faaf-1937-468d-945e-400a3d42d041 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.731772] env[63241]: DEBUG nova.scheduler.client.report [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1971.813987] env[63241]: DEBUG nova.compute.manager [req-2a517715-d037-4999-8959-e969f170007e req-7720af97-01ae-417d-b247-6318d2724c74 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Received event network-changed-4d328bf7-9485-42e2-9ad2-0b00ac726b9b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1971.814209] env[63241]: DEBUG nova.compute.manager [req-2a517715-d037-4999-8959-e969f170007e req-7720af97-01ae-417d-b247-6318d2724c74 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Refreshing instance network info cache due to event network-changed-4d328bf7-9485-42e2-9ad2-0b00ac726b9b. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1971.814495] env[63241]: DEBUG oslo_concurrency.lockutils [req-2a517715-d037-4999-8959-e969f170007e req-7720af97-01ae-417d-b247-6318d2724c74 service nova] Acquiring lock "refresh_cache-43684f7f-0a5d-48e5-8ab6-573db8d81ff0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.814644] env[63241]: DEBUG oslo_concurrency.lockutils [req-2a517715-d037-4999-8959-e969f170007e req-7720af97-01ae-417d-b247-6318d2724c74 service nova] Acquired lock "refresh_cache-43684f7f-0a5d-48e5-8ab6-573db8d81ff0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.814810] env[63241]: DEBUG nova.network.neutron [req-2a517715-d037-4999-8959-e969f170007e req-7720af97-01ae-417d-b247-6318d2724c74 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Refreshing network info cache for port 4d328bf7-9485-42e2-9ad2-0b00ac726b9b {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1971.843089] env[63241]: INFO nova.compute.manager [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Took 12.06 seconds to build instance. 
[ 1972.106711] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.237387] env[63241]: DEBUG oslo_concurrency.lockutils [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.239830] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.133s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.240073] env[63241]: DEBUG nova.objects.instance [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'pci_requests' on Instance uuid e4514260-dfcc-45a3-80d5-b5484b0b599c {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1972.256681] env[63241]: INFO nova.scheduler.client.report [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleted allocations for instance 26b75825-49c4-4870-957a-a2a76a970880 [ 1972.344504] env[63241]: DEBUG oslo_concurrency.lockutils [None req-311a39a0-6e01-463a-ac8d-57a96ffa6ba0 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.573s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.522393] env[63241]: DEBUG nova.network.neutron [req-2a517715-d037-4999-8959-e969f170007e req-7720af97-01ae-417d-b247-6318d2724c74 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Updated VIF entry in instance network info cache for port 4d328bf7-9485-42e2-9ad2-0b00ac726b9b. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1972.522777] env[63241]: DEBUG nova.network.neutron [req-2a517715-d037-4999-8959-e969f170007e req-7720af97-01ae-417d-b247-6318d2724c74 service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Updating instance_info_cache with network_info: [{"id": "4d328bf7-9485-42e2-9ad2-0b00ac726b9b", "address": "fa:16:3e:fe:98:7c", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d328bf7-94", "ovs_interfaceid": "4d328bf7-9485-42e2-9ad2-0b00ac726b9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1972.744091] env[63241]: DEBUG nova.objects.instance [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'numa_topology' on Instance uuid e4514260-dfcc-45a3-80d5-b5484b0b599c {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1972.764135] env[63241]: DEBUG oslo_concurrency.lockutils [None req-45bf347f-3599-49af-88b9-983da6d1e278 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "26b75825-49c4-4870-957a-a2a76a970880" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.892s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.025517] env[63241]: DEBUG oslo_concurrency.lockutils [req-2a517715-d037-4999-8959-e969f170007e req-7720af97-01ae-417d-b247-6318d2724c74 service nova] Releasing lock "refresh_cache-43684f7f-0a5d-48e5-8ab6-573db8d81ff0" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1973.247156] env[63241]: INFO nova.compute.claims [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1974.333251] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919530c7-be7c-4f4f-9cbe-ddaec3bab821 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.342006] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbb7231-3a56-4987-85fe-9e723c93e350 
{{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.377326] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac10750-d93b-408b-954f-f369ce753ada {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.384757] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa1c358-b1d1-4626-b143-dbaea2db624d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.398017] env[63241]: DEBUG nova.compute.provider_tree [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1974.901316] env[63241]: DEBUG nova.scheduler.client.report [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1975.210593] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.210829] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.406361] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.166s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.455228] env[63241]: INFO nova.network.neutron [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating port 249e56d5-0dc5-4bab-9179-ca69f7024104 with attributes {'binding:host_id': 'cpu-1', 
'device_owner': 'compute:nova'} [ 1975.713522] env[63241]: DEBUG nova.compute.manager [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1976.238505] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1976.238816] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1976.240430] env[63241]: INFO nova.compute.claims [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1977.326453] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0344e588-08f9-4f3b-8475-1ec3b3231e5c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.334906] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b78bd19-2f6e-416f-99de-5bc251040f60 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.364873] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0066ef1-4186-434d-90c0-1ebaa816d10c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.372960] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d1d899-c7cd-4c57-ac10-ceca7b5491f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.386687] env[63241]: DEBUG nova.compute.provider_tree [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1977.445784] env[63241]: DEBUG nova.compute.manager [req-43a64579-dab2-4151-9e6a-dd715ad555e0 req-42e382c0-2e08-48e4-985b-42b33c2aee6c service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received event network-vif-plugged-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1977.445905] env[63241]: DEBUG oslo_concurrency.lockutils [req-43a64579-dab2-4151-9e6a-dd715ad555e0 req-42e382c0-2e08-48e4-985b-42b33c2aee6c 
service nova] Acquiring lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.446030] env[63241]: DEBUG oslo_concurrency.lockutils [req-43a64579-dab2-4151-9e6a-dd715ad555e0 req-42e382c0-2e08-48e4-985b-42b33c2aee6c service nova] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.446229] env[63241]: DEBUG oslo_concurrency.lockutils [req-43a64579-dab2-4151-9e6a-dd715ad555e0 req-42e382c0-2e08-48e4-985b-42b33c2aee6c service nova] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.446374] env[63241]: DEBUG nova.compute.manager [req-43a64579-dab2-4151-9e6a-dd715ad555e0 req-42e382c0-2e08-48e4-985b-42b33c2aee6c service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] No waiting events found dispatching network-vif-plugged-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1977.446545] env[63241]: WARNING nova.compute.manager [req-43a64579-dab2-4151-9e6a-dd715ad555e0 req-42e382c0-2e08-48e4-985b-42b33c2aee6c service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received unexpected event network-vif-plugged-249e56d5-0dc5-4bab-9179-ca69f7024104 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1977.533844] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.534014] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.534263] env[63241]: DEBUG nova.network.neutron [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1977.889913] env[63241]: DEBUG nova.scheduler.client.report [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1978.240521] env[63241]: DEBUG nova.network.neutron [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating instance_info_cache with network_info: [{"id": "249e56d5-0dc5-4bab-9179-ca69f7024104", "address": "fa:16:3e:85:eb:39", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249e56d5-0d", "ovs_interfaceid": "249e56d5-0dc5-4bab-9179-ca69f7024104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1978.394336] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.155s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.394910] env[63241]: DEBUG nova.compute.manager [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1978.744143] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.772687] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='8c3188397e2fad96216ade192dd67d62',container_format='bare',created_at=2024-12-12T15:33:28Z,direct_url=,disk_format='vmdk',id=9f42e472-9966-4b9b-a13b-52502ede2ea3,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-695245018-shelved',owner='5d257d51a2254f5386fd3348602e5b71',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2024-12-12T15:33:41Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1978.772970] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1978.773148] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1978.773361] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1978.773534] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Image pref 0:0:0 {{(pid=63241) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1978.773688] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1978.773898] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1978.774075] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1978.774253] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1978.774418] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1978.774629] env[63241]: DEBUG nova.virt.hardware [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1978.775555] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b7d500-2bc2-46c5-94ad-fbab7ed9f6b4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.784717] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f63f9a9-1a28-42d4-91f4-e2323acbbd1b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.798522] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:eb:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '983826cf-6390-4ec6-bf97-30a1060947fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '249e56d5-0dc5-4bab-9179-ca69f7024104', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1978.806268] env[63241]: DEBUG oslo.service.loopingcall [None 
req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1978.806533] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1978.806751] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a933f35d-91c1-496c-9dc6-7d66749e6b7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.829609] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1978.829609] env[63241]: value = "task-1821344" [ 1978.829609] env[63241]: _type = "Task" [ 1978.829609] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.838637] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821344, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.901965] env[63241]: DEBUG nova.compute.utils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1978.903591] env[63241]: DEBUG nova.compute.manager [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1978.903748] env[63241]: DEBUG nova.network.neutron [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1978.947134] env[63241]: DEBUG nova.policy [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac0c578d40af405b8fe206fcd309cf0a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6c76b46a4cf4a32a4a1c25fb81a963d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1979.275817] env[63241]: DEBUG nova.network.neutron [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Successfully created port: 8611587f-7fb3-447f-81a7-8a4d5b83c554 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1979.350500] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821344, 'name': CreateVM_Task, 'duration_secs': 0.388498} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.350671] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1979.351509] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.351680] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.352055] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1979.352317] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb68336c-2415-4207-97af-a759f9c42914 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.357164] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1979.357164] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52d47680-1b00-6387-496b-f6cd267b35c1" [ 1979.357164] env[63241]: _type = "Task" [ 1979.357164] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.365042] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52d47680-1b00-6387-496b-f6cd267b35c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.406839] env[63241]: DEBUG nova.compute.manager [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Start building block device mappings for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1979.472684] env[63241]: DEBUG nova.compute.manager [req-febd5acb-a7b0-47b0-b5cf-028211fe258c req-f4629a15-84e6-4890-928b-a657472017d4 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received event network-changed-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1979.472969] env[63241]: DEBUG nova.compute.manager [req-febd5acb-a7b0-47b0-b5cf-028211fe258c req-f4629a15-84e6-4890-928b-a657472017d4 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Refreshing instance network info cache due to event network-changed-249e56d5-0dc5-4bab-9179-ca69f7024104. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1979.473246] env[63241]: DEBUG oslo_concurrency.lockutils [req-febd5acb-a7b0-47b0-b5cf-028211fe258c req-f4629a15-84e6-4890-928b-a657472017d4 service nova] Acquiring lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.473439] env[63241]: DEBUG oslo_concurrency.lockutils [req-febd5acb-a7b0-47b0-b5cf-028211fe258c req-f4629a15-84e6-4890-928b-a657472017d4 service nova] Acquired lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.473648] env[63241]: DEBUG nova.network.neutron [req-febd5acb-a7b0-47b0-b5cf-028211fe258c req-f4629a15-84e6-4890-928b-a657472017d4 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Refreshing network info cache for port 249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1979.867958] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.868249] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Processing image 9f42e472-9966-4b9b-a13b-52502ede2ea3 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1979.868486] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3/9f42e472-9966-4b9b-a13b-52502ede2ea3.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.868639] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3/9f42e472-9966-4b9b-a13b-52502ede2ea3.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.868826] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1979.869100] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36d1943e-334c-4345-a628-26b421b7654f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.888911] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 
tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1979.889140] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1979.889909] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-168cd665-b536-49cc-bee1-b40f1a1e545a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.896109] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1979.896109] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52397d43-65f2-47ce-b4a1-d7c57131af33" [ 1979.896109] env[63241]: _type = "Task" [ 1979.896109] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.904062] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52397d43-65f2-47ce-b4a1-d7c57131af33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.243842] env[63241]: DEBUG nova.network.neutron [req-febd5acb-a7b0-47b0-b5cf-028211fe258c req-f4629a15-84e6-4890-928b-a657472017d4 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updated VIF entry in instance network info cache for port 249e56d5-0dc5-4bab-9179-ca69f7024104. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1980.244249] env[63241]: DEBUG nova.network.neutron [req-febd5acb-a7b0-47b0-b5cf-028211fe258c req-f4629a15-84e6-4890-928b-a657472017d4 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating instance_info_cache with network_info: [{"id": "249e56d5-0dc5-4bab-9179-ca69f7024104", "address": "fa:16:3e:85:eb:39", "network": {"id": "a69061bb-b4a6-4815-95a6-8470aff11fa4", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1332452830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d257d51a2254f5386fd3348602e5b71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "983826cf-6390-4ec6-bf97-30a1060947fc", "external-id": "nsx-vlan-transportzone-367", "segmentation_id": 367, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249e56d5-0d", "ovs_interfaceid": "249e56d5-0dc5-4bab-9179-ca69f7024104", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.407477] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Preparing fetch location {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1980.407870] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Fetch image to [datastore1] OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078/OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078.vmdk {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1980.407945] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Downloading stream optimized image 9f42e472-9966-4b9b-a13b-52502ede2ea3 to [datastore1] OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078/OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078.vmdk on the data store datastore1 as vApp {{(pid=63241) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1980.408107] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Downloading image file data 9f42e472-9966-4b9b-a13b-52502ede2ea3 to the ESX as VM named 'OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078' {{(pid=63241) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1980.416144] env[63241]: DEBUG nova.compute.manager [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1980.458022] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1980.458022] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1980.458260] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1980.458360] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1980.458516] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1980.458667] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1980.458877] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1980.459052] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1980.459227] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1980.459393] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1980.459572] env[63241]: DEBUG nova.virt.hardware [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1980.460416] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d652f52e-f75f-4d44-93d2-38d81e523adb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.470882] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918730d0-ea8b-4d23-845b-4b8ed8f5e92c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.485907] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1980.485907] env[63241]: value = "resgroup-9" [ 1980.485907] env[63241]: _type = "ResourcePool" [ 1980.485907] env[63241]: }. {{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1980.486181] env[63241]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b29eab27-a58b-475d-96e6-9565e5f861fc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.508521] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lease: (returnval){ [ 1980.508521] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52267d94-dee4-c5c6-fd46-0a739ac91a7c" [ 1980.508521] env[63241]: _type = "HttpNfcLease" [ 1980.508521] env[63241]: } obtained for vApp import into resource pool (val){ [ 1980.508521] env[63241]: value = "resgroup-9" [ 1980.508521] env[63241]: _type = "ResourcePool" [ 1980.508521] env[63241]: }. 
{{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1980.508845] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the lease: (returnval){ [ 1980.508845] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52267d94-dee4-c5c6-fd46-0a739ac91a7c" [ 1980.508845] env[63241]: _type = "HttpNfcLease" [ 1980.508845] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1980.515835] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1980.515835] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52267d94-dee4-c5c6-fd46-0a739ac91a7c" [ 1980.515835] env[63241]: _type = "HttpNfcLease" [ 1980.515835] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1980.746977] env[63241]: DEBUG oslo_concurrency.lockutils [req-febd5acb-a7b0-47b0-b5cf-028211fe258c req-f4629a15-84e6-4890-928b-a657472017d4 service nova] Releasing lock "refresh_cache-e4514260-dfcc-45a3-80d5-b5484b0b599c" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.769701] env[63241]: DEBUG nova.network.neutron [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Successfully updated port: 8611587f-7fb3-447f-81a7-8a4d5b83c554 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1981.017815] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1981.017815] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52267d94-dee4-c5c6-fd46-0a739ac91a7c" [ 1981.017815] env[63241]: _type = "HttpNfcLease" [ 1981.017815] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1981.018546] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1981.018546] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52267d94-dee4-c5c6-fd46-0a739ac91a7c" [ 1981.018546] env[63241]: _type = "HttpNfcLease" [ 1981.018546] env[63241]: }. {{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1981.019428] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a1c8a3-fc69-493d-86b0-7ca0d3cf3d57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.027092] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f705ac-e0a2-36f5-7a82-6ed6391100b5/disk-0.vmdk from lease info. 
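
Annotation: the oslo_vmware.rw_handles/api entries above show the vApp-import lease lifecycle for the stream-optimized image: request an HttpNfcLease against the resource pool, poll it while it reports "initializing" until it is "ready", then read the VMDK upload URL out of the lease info. Below is a standalone sketch of that poll-until-ready loop under those assumptions; the lease object is a stub, not the oslo.vmware one.

    import time

    class StubLease:
        """Stand-in for an HttpNfcLease: reports 'initializing' then 'ready'."""
        def __init__(self, ready_after=2):
            self._polls = 0
            self._ready_after = ready_after

        @property
        def state(self):
            self._polls += 1
            return "ready" if self._polls >= self._ready_after else "initializing"

    def wait_for_lease_ready(lease, poll_interval=0.5, timeout=30.0):
        """Poll the lease state until it is ready, erroring out on timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state = lease.state
            if state == "ready":
                return
            if state == "error":
                raise RuntimeError("lease entered error state")
            time.sleep(poll_interval)
        raise TimeoutError("lease did not become ready in time")

    wait_for_lease_ready(StubLease())
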
{{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1981.027328] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f705ac-e0a2-36f5-7a82-6ed6391100b5/disk-0.vmdk. {{(pid=63241) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1981.090185] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7a659aaf-e68b-493a-8843-0e31f3c7acc8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.272484] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "refresh_cache-8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.273752] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "refresh_cache-8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.273752] env[63241]: DEBUG nova.network.neutron [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1981.527128] env[63241]: DEBUG nova.compute.manager [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Received event network-vif-plugged-8611587f-7fb3-447f-81a7-8a4d5b83c554 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1981.527445] env[63241]: DEBUG oslo_concurrency.lockutils [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] Acquiring lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.527698] env[63241]: DEBUG oslo_concurrency.lockutils [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.527935] env[63241]: DEBUG oslo_concurrency.lockutils [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.528251] env[63241]: DEBUG nova.compute.manager [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] No waiting events found dispatching network-vif-plugged-8611587f-7fb3-447f-81a7-8a4d5b83c554 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1981.528459] env[63241]: WARNING nova.compute.manager [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Received unexpected event network-vif-plugged-8611587f-7fb3-447f-81a7-8a4d5b83c554 for instance with vm_state building and task_state spawning. [ 1981.528705] env[63241]: DEBUG nova.compute.manager [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Received event network-changed-8611587f-7fb3-447f-81a7-8a4d5b83c554 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1981.528962] env[63241]: DEBUG nova.compute.manager [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Refreshing instance network info cache due to event network-changed-8611587f-7fb3-447f-81a7-8a4d5b83c554. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1981.529137] env[63241]: DEBUG oslo_concurrency.lockutils [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] Acquiring lock "refresh_cache-8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.806503] env[63241]: DEBUG nova.network.neutron [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Instance cache missing network info. 
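
Annotation: the compute.manager entries above show the external-event path: Neutron's network-vif-plugged event for port 8611587f arrives, the per-instance "-events" lock is taken, and because nothing is currently waiting on that event the manager logs the "Received unexpected event" warning and continues. A small sketch of that pop-or-warn pattern under a lock follows; the data structures and names are stand-ins, not Nova's.

    import threading
    import warnings
    from collections import defaultdict

    _lock = threading.Lock()
    # Waiting events keyed by instance UUID -> set of event names someone waits for.
    _waiting = defaultdict(set)

    def pop_instance_event(instance_uuid: str, event_name: str) -> bool:
        """Return True if a waiter was registered for this event, else warn."""
        with _lock:
            if event_name in _waiting[instance_uuid]:
                _waiting[instance_uuid].discard(event_name)
                return True
        warnings.warn(f"unexpected event {event_name} for instance {instance_uuid}")
        return False

    # No waiter was registered, mirroring the WARNING in the log above.
    pop_instance_event("8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59",
                       "network-vif-plugged-8611587f-7fb3-447f-81a7-8a4d5b83c554")
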
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1981.957381] env[63241]: DEBUG nova.network.neutron [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Updating instance_info_cache with network_info: [{"id": "8611587f-7fb3-447f-81a7-8a4d5b83c554", "address": "fa:16:3e:3c:03:08", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8611587f-7f", "ovs_interfaceid": "8611587f-7fb3-447f-81a7-8a4d5b83c554", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.192497] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Completed reading data from the image iterator. {{(pid=63241) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1982.192840] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f705ac-e0a2-36f5-7a82-6ed6391100b5/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1982.194044] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f69757-44cb-4af9-973d-e318e63bef14 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.202388] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f705ac-e0a2-36f5-7a82-6ed6391100b5/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1982.202581] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f705ac-e0a2-36f5-7a82-6ed6391100b5/disk-0.vmdk. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1982.202956] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-ec348f2a-dda3-4f07-8e0e-f90fcd4c4eed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.410049] env[63241]: DEBUG oslo_vmware.rw_handles [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f705ac-e0a2-36f5-7a82-6ed6391100b5/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1982.410049] env[63241]: INFO nova.virt.vmwareapi.images [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Downloaded image file data 9f42e472-9966-4b9b-a13b-52502ede2ea3 [ 1982.410296] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f3b7fe-dc9a-435d-9eb4-490836989eb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.432051] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d3f611a-9bf9-41f5-bf81-a7f1e848405f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.459557] env[63241]: INFO nova.virt.vmwareapi.images [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] The imported VM was unregistered [ 1982.461953] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Caching image {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1982.462223] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Creating directory with path [datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1982.462737] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "refresh_cache-8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.463087] env[63241]: DEBUG nova.compute.manager [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Instance network_info: |[{"id": "8611587f-7fb3-447f-81a7-8a4d5b83c554", "address": "fa:16:3e:3c:03:08", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", 
"label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8611587f-7f", "ovs_interfaceid": "8611587f-7fb3-447f-81a7-8a4d5b83c554", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1982.463327] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2444f367-1c5f-471b-95ea-2707775226cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.465741] env[63241]: DEBUG oslo_concurrency.lockutils [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] Acquired lock "refresh_cache-8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.465919] env[63241]: DEBUG nova.network.neutron [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Refreshing network info cache for port 8611587f-7fb3-447f-81a7-8a4d5b83c554 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1982.467229] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:03:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8611587f-7fb3-447f-81a7-8a4d5b83c554', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1982.474838] env[63241]: DEBUG oslo.service.loopingcall [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1982.477840] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1982.479032] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dff78dbf-0a96-4ec6-ab24-81902779c971 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.500730] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1982.500730] env[63241]: value = "task-1821349" [ 1982.500730] env[63241]: _type = "Task" [ 1982.500730] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.501062] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Created directory with path [datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1982.501245] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078/OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078.vmdk to [datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3/9f42e472-9966-4b9b-a13b-52502ede2ea3.vmdk. {{(pid=63241) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1982.504802] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-5237c5dc-7466-4ce0-8008-bc48cfab99cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.512401] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821349, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.513722] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1982.513722] env[63241]: value = "task-1821350" [ 1982.513722] env[63241]: _type = "Task" [ 1982.513722] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.522095] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821350, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.756289] env[63241]: DEBUG nova.network.neutron [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Updated VIF entry in instance network info cache for port 8611587f-7fb3-447f-81a7-8a4d5b83c554. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1982.756741] env[63241]: DEBUG nova.network.neutron [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Updating instance_info_cache with network_info: [{"id": "8611587f-7fb3-447f-81a7-8a4d5b83c554", "address": "fa:16:3e:3c:03:08", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8611587f-7f", "ovs_interfaceid": "8611587f-7fb3-447f-81a7-8a4d5b83c554", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.013028] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821349, 'name': CreateVM_Task, 'duration_secs': 0.368524} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.013248] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1983.014128] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.014332] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.014793] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1983.015200] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8167c973-a9a2-4a2d-be13-f6b98a543a94 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.025964] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1983.025964] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5261c386-0cb8-ded2-b4ab-a0983d267c0e" [ 1983.025964] env[63241]: _type = "Task" [ 1983.025964] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.030244] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821350, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.040308] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5261c386-0cb8-ded2-b4ab-a0983d267c0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.259712] env[63241]: DEBUG oslo_concurrency.lockutils [req-b911b33f-5edd-4142-b1ee-a7e61c406845 req-44a7e709-a0a5-4dcd-9fd7-b7e97dccd770 service nova] Releasing lock "refresh_cache-8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.525554] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821350, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.538473] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5261c386-0cb8-ded2-b4ab-a0983d267c0e, 'name': SearchDatastore_Task, 'duration_secs': 0.085174} completed successfully. 
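
Annotation: most of the remaining entries repeat the same wait_for_task pattern: start a vCenter task (CreateVM_Task, SearchDatastore_Task, MoveVirtualDisk_Task, ...), then poll it, logging "progress is N%" until it completes with a duration. A generic standalone sketch of that polling loop follows; the task object is a stub, not an oslo.vmware task.

    import time

    class StubTask:
        """Stand-in for a vCenter task that advances a little on every poll."""
        def __init__(self, name, steps=4):
            self.name = name
            self._progress = 0
            self._step = 100 // steps

        def poll(self):
            self._progress = min(100, self._progress + self._step)
            return {"state": "success" if self._progress >= 100 else "running",
                    "progress": self._progress}

    def wait_for_task(task, interval=0.2):
        """Poll until the task succeeds, returning how long it took."""
        start = time.monotonic()
        while True:
            info = task.poll()
            print(f"Task {task.name} progress is {info['progress']}%")
            if info["state"] == "success":
                return time.monotonic() - start
            if info["state"] == "error":
                raise RuntimeError(f"task {task.name} failed")
            time.sleep(interval)

    duration = wait_for_task(StubTask("MoveVirtualDisk_Task"))
    print(f"completed successfully, duration_secs={duration:.3f}")
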
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.539289] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.539289] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1983.539289] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.539511] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.539939] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1983.539939] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34c02990-ef01-40fc-9502-f81d4cbe4d30 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.560281] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1983.560573] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Folder [datastore1] devstack-image-cache_base created. 
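
Annotation: the ds_util entries above show the image-cache layout on the datastore: a per-image directory under [datastore1] devstack-image-cache_base is created idempotently, cached disks live at <image-id>/<image-id>.vmdk, and instance disks at <instance-uuid>/<instance-uuid>.vmdk. A small helper sketch for composing those datastore paths, with purely illustrative naming:

    def ds_path(datastore: str, *parts: str) -> str:
        """Compose a '[datastore] a/b/c' style path as seen in the log."""
        return f"[{datastore}] " + "/".join(parts)

    image_id = "9f42e472-9966-4b9b-a13b-52502ede2ea3"
    instance_uuid = "e4514260-dfcc-45a3-80d5-b5484b0b599c"

    cached_vmdk = ds_path("datastore1", "devstack-image-cache_base",
                          image_id, f"{image_id}.vmdk")
    instance_vmdk = ds_path("datastore1", instance_uuid, f"{instance_uuid}.vmdk")
    print(cached_vmdk)
    print(instance_vmdk)
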
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1983.561496] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eb576c2-4113-4a89-9493-14642aa5c224 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.568781] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1983.568781] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5206f563-6b2f-9295-ac6c-2f941c3b0bf0" [ 1983.568781] env[63241]: _type = "Task" [ 1983.568781] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.577995] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5206f563-6b2f-9295-ac6c-2f941c3b0bf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.028108] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821350, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.081100] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5206f563-6b2f-9295-ac6c-2f941c3b0bf0, 'name': SearchDatastore_Task, 'duration_secs': 0.212281} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.081759] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42051f3e-317f-4b6e-a325-8af5c797d97d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.089135] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1984.089135] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52722925-b193-9ff2-f811-2451c2c3cd80" [ 1984.089135] env[63241]: _type = "Task" [ 1984.089135] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.099827] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52722925-b193-9ff2-f811-2451c2c3cd80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.528408] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821350, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.602510] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52722925-b193-9ff2-f811-2451c2c3cd80, 'name': SearchDatastore_Task, 'duration_secs': 0.083229} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.602799] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.603077] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59/8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1984.603363] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2123832-7b6f-4148-8cee-cc85783e005b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.613361] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1984.613361] env[63241]: value = "task-1821352" [ 1984.613361] env[63241]: _type = "Task" [ 1984.613361] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.623595] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.028465] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821350, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.444105} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.029000] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078/OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078.vmdk to [datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3/9f42e472-9966-4b9b-a13b-52502ede2ea3.vmdk. [ 1985.029000] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Cleaning up location [datastore1] OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1985.029224] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_62d473f2-2eb5-400f-86bc-542e4b634078 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1985.029381] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b65213ed-7986-476c-a7d0-13f01f98dcdb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.037571] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1985.037571] env[63241]: value = "task-1821353" [ 1985.037571] env[63241]: _type = "Task" [ 1985.037571] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.047504] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.126479] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821352, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.547544] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223611} completed successfully. 
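
Annotation: taken together, the e4514260 entries spell out the stream-optimized caching sequence: import the image as a temporary OSTACK_IMG_* VM over the NFC lease, unregister that VM while keeping its files, move the VMDK into devstack-image-cache_base/<image id>/, delete the leftover OSTACK_IMG_* directory, and only then copy from the cache into the instance directory. A compact sketch of that orchestration follows; the `ops` step functions are placeholders, not driver APIs.

    import uuid

    def fetch_image_if_missing(image_id: str, instance_uuid: str, ops) -> None:
        """Orchestrate the caching flow the log walks through; `ops` supplies
        the datastore/vCenter operations and is stubbed out below."""
        tmp_vm = f"OSTACK_IMG_{uuid.uuid4()}"             # temporary import target
        cache_dir = f"devstack-image-cache_base/{image_id}"
        if not ops.cached(image_id):
            ops.import_vapp(image_id, tmp_vm)             # NFC lease upload
            ops.unregister_vm(tmp_vm)                     # keep the files, drop the VM
            ops.make_dir(cache_dir)
            ops.move_disk(f"{tmp_vm}/{tmp_vm}.vmdk", f"{cache_dir}/{image_id}.vmdk")
            ops.delete_dir(tmp_vm)                        # clean up the import location
        ops.copy_disk(f"{cache_dir}/{image_id}.vmdk",
                      f"{instance_uuid}/{instance_uuid}.vmdk")

    class PrintOps:
        """Trivial stand-in that prints each step instead of calling vCenter."""
        def cached(self, image_id):
            return False
        def __getattr__(self, name):
            return lambda *args: print(name, *args)

    fetch_image_if_missing("9f42e472-9966-4b9b-a13b-52502ede2ea3",
                           "e4514260-dfcc-45a3-80d5-b5484b0b599c", PrintOps())
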
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.547872] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1985.548094] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3/9f42e472-9966-4b9b-a13b-52502ede2ea3.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.548384] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3/9f42e472-9966-4b9b-a13b-52502ede2ea3.vmdk to [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c/e4514260-dfcc-45a3-80d5-b5484b0b599c.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1985.548652] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e5661ad-be18-4c47-b43f-596858c13b2a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.556337] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1985.556337] env[63241]: value = "task-1821354" [ 1985.556337] env[63241]: _type = "Task" [ 1985.556337] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.564304] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821354, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.623851] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821352, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.737424} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.624133] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59/8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1985.624351] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1985.624613] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1e5bc9b-7a17-4549-b1ec-93c18df42f65 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.631781] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1985.631781] env[63241]: value = "task-1821355" [ 1985.631781] env[63241]: _type = "Task" [ 1985.631781] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.640287] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821355, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.068289] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821354, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.143931] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821355, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063572} completed successfully. 
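
Annotation: the "Extending root virtual disk to 1048576" entry above is the flavor's root_gb=1 expressed in KB for the extend call (1 GiB = 1024 * 1024 KB); the copied cirros image is smaller than the flavor's root disk, so it is grown to the flavor size after the copy. A one-line worked example of that conversion, with an illustrative helper name:

    def root_disk_kb(root_gb: int) -> int:
        """Flavor root_gb expressed in KB, the unit the extend call logs."""
        return root_gb * 1024 * 1024

    assert root_disk_kb(1) == 1048576   # matches "Extending root virtual disk to 1048576"
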
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.144312] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1986.145314] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0208a3b-8687-4744-be45-90f44ffd623c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.168815] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59/8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1986.169185] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ce1254a-ebf1-4dcb-88ff-15828a7ffbad {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.192677] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1986.192677] env[63241]: value = "task-1821357" [ 1986.192677] env[63241]: _type = "Task" [ 1986.192677] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.202311] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.569907] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821354, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.704671] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821357, 'name': ReconfigVM_Task, 'duration_secs': 0.302779} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.705017] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59/8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1986.705656] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0402cf28-358b-4836-9cfc-ae9ac59a6326 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.715855] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1986.715855] env[63241]: value = "task-1821358" [ 1986.715855] env[63241]: _type = "Task" [ 1986.715855] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.728713] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821358, 'name': Rename_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.075970] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821354, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.228374] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821358, 'name': Rename_Task, 'duration_secs': 0.167608} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.228665] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1987.228924] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a8ab212-a6b2-4702-872c-9b3622ebaa26 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.238542] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 1987.238542] env[63241]: value = "task-1821359" [ 1987.238542] env[63241]: _type = "Task" [ 1987.238542] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.249120] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821359, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.569795] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821354, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.750509] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821359, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.070359] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821354, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.22184} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.070719] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9f42e472-9966-4b9b-a13b-52502ede2ea3/9f42e472-9966-4b9b-a13b-52502ede2ea3.vmdk to [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c/e4514260-dfcc-45a3-80d5-b5484b0b599c.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1988.071653] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1ebd8f-bf62-42ba-8bf7-f1fbeeaa9eb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.096410] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c/e4514260-dfcc-45a3-80d5-b5484b0b599c.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1988.096684] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e33194f2-1659-46a2-9a6b-288751584314 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.117263] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1988.117263] env[63241]: value = "task-1821361" [ 1988.117263] env[63241]: 
_type = "Task" [ 1988.117263] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.125744] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821361, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.249689] env[63241]: DEBUG oslo_vmware.api [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821359, 'name': PowerOnVM_Task, 'duration_secs': 0.852567} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.249979] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1988.250169] env[63241]: INFO nova.compute.manager [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Took 7.83 seconds to spawn the instance on the hypervisor. [ 1988.250365] env[63241]: DEBUG nova.compute.manager [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1988.251212] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea0ed2e-10ed-40cc-ae3e-3e2e83323f8f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.627544] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821361, 'name': ReconfigVM_Task, 'duration_secs': 0.28764} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.627811] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Reconfigured VM instance instance-0000006d to attach disk [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c/e4514260-dfcc-45a3-80d5-b5484b0b599c.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1988.628463] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55d027b1-5de6-4048-8ad0-934a50a87564 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.635934] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1988.635934] env[63241]: value = "task-1821362" [ 1988.635934] env[63241]: _type = "Task" [ 1988.635934] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.644763] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821362, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.768053] env[63241]: INFO nova.compute.manager [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Took 12.55 seconds to build instance. [ 1989.147075] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821362, 'name': Rename_Task, 'duration_secs': 0.164237} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.147370] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1989.147605] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66ba6513-b8bf-4753-90c6-ca72c665b691 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.154401] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 1989.154401] env[63241]: value = "task-1821363" [ 1989.154401] env[63241]: _type = "Task" [ 1989.154401] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.162400] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821363, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.270520] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5461cd59-b57f-43da-92b1-ef4ced3377cb tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.059s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.373839] env[63241]: DEBUG nova.compute.manager [req-037e70cf-0bb6-48b0-832e-688caacd99c4 req-fa2519dd-86a2-445c-b165-97ef2b22ad11 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Received event network-changed-8611587f-7fb3-447f-81a7-8a4d5b83c554 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1989.374073] env[63241]: DEBUG nova.compute.manager [req-037e70cf-0bb6-48b0-832e-688caacd99c4 req-fa2519dd-86a2-445c-b165-97ef2b22ad11 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Refreshing instance network info cache due to event network-changed-8611587f-7fb3-447f-81a7-8a4d5b83c554. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1989.374295] env[63241]: DEBUG oslo_concurrency.lockutils [req-037e70cf-0bb6-48b0-832e-688caacd99c4 req-fa2519dd-86a2-445c-b165-97ef2b22ad11 service nova] Acquiring lock "refresh_cache-8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.374442] env[63241]: DEBUG oslo_concurrency.lockutils [req-037e70cf-0bb6-48b0-832e-688caacd99c4 req-fa2519dd-86a2-445c-b165-97ef2b22ad11 service nova] Acquired lock "refresh_cache-8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.374599] env[63241]: DEBUG nova.network.neutron [req-037e70cf-0bb6-48b0-832e-688caacd99c4 req-fa2519dd-86a2-445c-b165-97ef2b22ad11 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Refreshing network info cache for port 8611587f-7fb3-447f-81a7-8a4d5b83c554 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1989.664628] env[63241]: DEBUG oslo_vmware.api [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821363, 'name': PowerOnVM_Task, 'duration_secs': 0.431958} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.665096] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1989.760534] env[63241]: DEBUG nova.compute.manager [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1989.761526] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46030b3-f775-4ca5-bcc4-0c8a3a952564 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.093715] env[63241]: DEBUG nova.network.neutron [req-037e70cf-0bb6-48b0-832e-688caacd99c4 req-fa2519dd-86a2-445c-b165-97ef2b22ad11 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Updated VIF entry in instance network info cache for port 8611587f-7fb3-447f-81a7-8a4d5b83c554. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1990.094107] env[63241]: DEBUG nova.network.neutron [req-037e70cf-0bb6-48b0-832e-688caacd99c4 req-fa2519dd-86a2-445c-b165-97ef2b22ad11 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Updating instance_info_cache with network_info: [{"id": "8611587f-7fb3-447f-81a7-8a4d5b83c554", "address": "fa:16:3e:3c:03:08", "network": {"id": "792b0abb-c53c-4207-95d1-923ad55624ce", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-130630329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6c76b46a4cf4a32a4a1c25fb81a963d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8611587f-7f", "ovs_interfaceid": "8611587f-7fb3-447f-81a7-8a4d5b83c554", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.279615] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6fc4ff49-1a81-4b4c-9efa-af6754ddb606 tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.196s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1990.596733] env[63241]: DEBUG oslo_concurrency.lockutils [req-037e70cf-0bb6-48b0-832e-688caacd99c4 req-fa2519dd-86a2-445c-b165-97ef2b22ad11 service nova] Releasing lock "refresh_cache-8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1992.174449] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.174743] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.677308] env[63241]: DEBUG nova.compute.manager [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Starting instance... {{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1993.199961] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.200290] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.202138] env[63241]: INFO nova.compute.claims [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1994.293992] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17f4797-3722-4c21-b296-77f143e88819 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.302490] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e16128-575b-4eab-818e-a5f986cc9154 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.333482] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6b124c-c8f1-4c66-9790-1be3989b76cd {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.341493] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d504d2f-7b0b-44a7-af8a-a197eeb8252d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.354782] env[63241]: DEBUG nova.compute.provider_tree [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1994.857914] env[63241]: DEBUG nova.scheduler.client.report [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1995.363387] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.163s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.363827] env[63241]: DEBUG nova.compute.manager [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1995.869303] env[63241]: DEBUG nova.compute.utils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1995.870746] env[63241]: DEBUG nova.compute.manager [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Allocating IP information in the background. 
{{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1995.870975] env[63241]: DEBUG nova.network.neutron [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1995.919887] env[63241]: DEBUG nova.policy [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de6df2e8caaa4c0c82c94f9d107a8e17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6964b0dd75c4704b8f5cacd2c8e355f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 1996.208184] env[63241]: DEBUG nova.network.neutron [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Successfully created port: b4f2cf51-6a1b-455b-a0dc-72616da10ed0 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1996.374336] env[63241]: DEBUG nova.compute.manager [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1996.879952] env[63241]: INFO nova.virt.block_device [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Booting with volume 1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e at /dev/sda [ 1996.914361] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-18289283-1d0f-4472-9526-f66500c37b05 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.925013] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c87557-cc03-42c5-b417-ac93b7ce8b3d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.957630] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c801029-949d-49a8-bde9-b456ca5752ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.967500] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3e00a8-ba7a-478e-ad63-eb444850beb9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.998106] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c364d887-92b2-4325-9a3b-3f23ef9d9623 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.005771] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d75181-ac87-4873-beb0-97c22cb4a253 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.020678] env[63241]: DEBUG nova.virt.block_device [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating existing volume attachment record: 64943d46-6428-42e3-888c-8da4040a6b4b {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1997.667796] env[63241]: DEBUG nova.compute.manager [req-dcef1834-437c-4884-a9a8-4b6e58d99328 req-a000db8a-0cb7-42f7-b6a4-103f62bb2698 service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Received event network-vif-plugged-b4f2cf51-6a1b-455b-a0dc-72616da10ed0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1997.668069] env[63241]: DEBUG oslo_concurrency.lockutils [req-dcef1834-437c-4884-a9a8-4b6e58d99328 req-a000db8a-0cb7-42f7-b6a4-103f62bb2698 service nova] Acquiring lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.668244] env[63241]: DEBUG oslo_concurrency.lockutils [req-dcef1834-437c-4884-a9a8-4b6e58d99328 req-a000db8a-0cb7-42f7-b6a4-103f62bb2698 service nova] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.668409] env[63241]: DEBUG oslo_concurrency.lockutils [req-dcef1834-437c-4884-a9a8-4b6e58d99328 req-a000db8a-0cb7-42f7-b6a4-103f62bb2698 service nova] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.668573] env[63241]: DEBUG nova.compute.manager [req-dcef1834-437c-4884-a9a8-4b6e58d99328 req-a000db8a-0cb7-42f7-b6a4-103f62bb2698 service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] No waiting events found dispatching network-vif-plugged-b4f2cf51-6a1b-455b-a0dc-72616da10ed0 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1997.668743] env[63241]: WARNING nova.compute.manager [req-dcef1834-437c-4884-a9a8-4b6e58d99328 req-a000db8a-0cb7-42f7-b6a4-103f62bb2698 service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Received unexpected event network-vif-plugged-b4f2cf51-6a1b-455b-a0dc-72616da10ed0 for instance with vm_state building and task_state block_device_mapping. [ 1997.753732] env[63241]: DEBUG nova.network.neutron [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Successfully updated port: b4f2cf51-6a1b-455b-a0dc-72616da10ed0 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1998.259622] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1998.259837] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1998.259998] env[63241]: DEBUG nova.network.neutron [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1998.794031] env[63241]: DEBUG nova.network.neutron [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1998.918233] env[63241]: DEBUG nova.network.neutron [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance_info_cache with network_info: [{"id": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "address": "fa:16:3e:c2:2a:0b", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4f2cf51-6a", "ovs_interfaceid": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.115776] env[63241]: DEBUG nova.compute.manager [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Start spawning the instance on the hypervisor. 
{{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1999.116389] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1999.116640] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1999.116783] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1999.117035] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1999.117211] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1999.117364] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1999.117570] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1999.117726] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1999.117894] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Got 1 possible topologies 
{{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1999.118067] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1999.118242] env[63241]: DEBUG nova.virt.hardware [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1999.119118] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31bc5fd6-a2e9-4dd1-9bb1-2f500b78d1c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.127895] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce931faf-4133-41e9-b32e-0eb92de4db12 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.421253] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1999.421537] env[63241]: DEBUG nova.compute.manager [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Instance network_info: |[{"id": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "address": "fa:16:3e:c2:2a:0b", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4f2cf51-6a", "ovs_interfaceid": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1999.422023] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:2a:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4f2cf51-6a1b-455b-a0dc-72616da10ed0', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1999.432295] env[63241]: DEBUG oslo.service.loopingcall [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1999.432507] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1999.432733] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c183ef0-d95d-4282-aa19-d8937ccd03cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.452770] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1999.452770] env[63241]: value = "task-1821364" [ 1999.452770] env[63241]: _type = "Task" [ 1999.452770] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.460976] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.698666] env[63241]: DEBUG nova.compute.manager [req-e5d74e0f-556b-4591-8177-592f969ccddc req-cab4401c-a412-4d87-b5fd-9f69e8e85c92 service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Received event network-changed-b4f2cf51-6a1b-455b-a0dc-72616da10ed0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1999.698902] env[63241]: DEBUG nova.compute.manager [req-e5d74e0f-556b-4591-8177-592f969ccddc req-cab4401c-a412-4d87-b5fd-9f69e8e85c92 service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Refreshing instance network info cache due to event network-changed-b4f2cf51-6a1b-455b-a0dc-72616da10ed0. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1999.699084] env[63241]: DEBUG oslo_concurrency.lockutils [req-e5d74e0f-556b-4591-8177-592f969ccddc req-cab4401c-a412-4d87-b5fd-9f69e8e85c92 service nova] Acquiring lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1999.699229] env[63241]: DEBUG oslo_concurrency.lockutils [req-e5d74e0f-556b-4591-8177-592f969ccddc req-cab4401c-a412-4d87-b5fd-9f69e8e85c92 service nova] Acquired lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1999.699393] env[63241]: DEBUG nova.network.neutron [req-e5d74e0f-556b-4591-8177-592f969ccddc req-cab4401c-a412-4d87-b5fd-9f69e8e85c92 service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Refreshing network info cache for port b4f2cf51-6a1b-455b-a0dc-72616da10ed0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1999.963742] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.401071] env[63241]: DEBUG nova.network.neutron [req-e5d74e0f-556b-4591-8177-592f969ccddc req-cab4401c-a412-4d87-b5fd-9f69e8e85c92 service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updated VIF entry in instance network info cache for port b4f2cf51-6a1b-455b-a0dc-72616da10ed0. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2000.401454] env[63241]: DEBUG nova.network.neutron [req-e5d74e0f-556b-4591-8177-592f969ccddc req-cab4401c-a412-4d87-b5fd-9f69e8e85c92 service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance_info_cache with network_info: [{"id": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "address": "fa:16:3e:c2:2a:0b", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4f2cf51-6a", "ovs_interfaceid": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.451271] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2000.451498] 
env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2000.451648] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2000.464715] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.904934] env[63241]: DEBUG oslo_concurrency.lockutils [req-e5d74e0f-556b-4591-8177-592f969ccddc req-cab4401c-a412-4d87-b5fd-9f69e8e85c92 service nova] Releasing lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2000.965560] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.467968] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.967527] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2001.967934] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.967934] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.968114] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.968270] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.968421] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.968607] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.968758] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.968887] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2001.969056] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.969182] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2002.469267] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.484861] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] There are 52 instances to clean {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2002.484861] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: dfbe2f8b-e750-45b6-bc90-5021b3c0e267] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2002.967484] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.988134] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: f55ed224-90d4-4fdc-bd78-d1cfb9f641e4] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2003.050816] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "215f658f-2af6-4525-b94c-489ad794e6f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.051097] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "215f658f-2af6-4525-b94c-489ad794e6f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.051307] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "215f658f-2af6-4525-b94c-489ad794e6f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.051834] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "215f658f-2af6-4525-b94c-489ad794e6f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.052039] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "215f658f-2af6-4525-b94c-489ad794e6f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.055101] env[63241]: INFO nova.compute.manager [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Terminating instance [ 2003.056784] env[63241]: DEBUG nova.compute.manager [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2003.056983] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2003.057819] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e679b648-c7e0-4725-991e-dbda5be62307 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.065907] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2003.066146] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10f171d7-0367-49f6-b6f0-b9c3796eb9a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.073083] env[63241]: DEBUG oslo_vmware.api [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2003.073083] env[63241]: value = "task-1821365" [ 2003.073083] env[63241]: _type = "Task" [ 2003.073083] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.080990] env[63241]: DEBUG oslo_vmware.api [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821365, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.468614] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.491239] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: eb8e453e-76bf-4489-9a5f-9b15e03cd6ba] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2003.583480] env[63241]: DEBUG oslo_vmware.api [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821365, 'name': PowerOffVM_Task, 'duration_secs': 0.193687} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.583747] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2003.583919] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2003.584177] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38940b5d-fcb2-4e58-8798-ca4cef9ce898 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.968979] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.996663] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: c6387938-1e3c-43c7-a0fe-3b84fb7e6d6a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2004.471112] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 25%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.499749] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 77c501b6-9ef7-4ad9-9013-7bf6b773f2e0] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2004.817074] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2004.817074] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2004.817339] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleting the datastore file [datastore1] 215f658f-2af6-4525-b94c-489ad794e6f7 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2004.817689] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e018aa78-26ce-4456-acf8-442489a7be10 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.826577] env[63241]: DEBUG oslo_vmware.api [None 
req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2004.826577] env[63241]: value = "task-1821367" [ 2004.826577] env[63241]: _type = "Task" [ 2004.826577] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.836579] env[63241]: DEBUG oslo_vmware.api [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821367, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.971828] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.003453] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 26b75825-49c4-4870-957a-a2a76a970880] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2005.350119] env[63241]: DEBUG oslo_vmware.api [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164498} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.350330] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2005.350517] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2005.350697] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2005.350888] env[63241]: INFO nova.compute.manager [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Took 2.29 seconds to destroy the instance on the hypervisor. [ 2005.351158] env[63241]: DEBUG oslo.service.loopingcall [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2005.351358] env[63241]: DEBUG nova.compute.manager [-] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2005.351457] env[63241]: DEBUG nova.network.neutron [-] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2005.470960] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821364, 'name': CreateVM_Task, 'duration_secs': 5.624653} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.471145] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2005.471852] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377237', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'name': 'volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e2aea319-280e-4dc8-9c90-f080cdf2a08a', 'attached_at': '', 'detached_at': '', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'serial': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e'}, 'boot_index': 0, 'attachment_id': '64943d46-6428-42e3-888c-8da4040a6b4b', 'disk_bus': None, 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=63241) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2005.472351] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Root volume attach. 
Driver type: vmdk {{(pid=63241) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2005.472866] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c604e04-045d-4028-b509-0802e7cc5f7c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.480422] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcae8d9c-335d-4014-8f54-81d821772c62 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.486341] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13046c9-8266-4036-a5c5-cb0b26d061ae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.492135] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-40b0db42-9f8d-448d-a8cc-b70375eba71e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.499113] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2005.499113] env[63241]: value = "task-1821368" [ 2005.499113] env[63241]: _type = "Task" [ 2005.499113] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.506899] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: bf339484-4b96-4326-b035-39783aff4461] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2005.508649] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821368, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.897603] env[63241]: DEBUG nova.compute.manager [req-47a524b1-df3f-4cda-95b8-ed808bbca2e4 req-743a6db3-3e48-40ba-bb7d-92295633798e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Received event network-vif-deleted-fbe70abb-a696-4530-893c-079aa3168dc7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2005.897808] env[63241]: INFO nova.compute.manager [req-47a524b1-df3f-4cda-95b8-ed808bbca2e4 req-743a6db3-3e48-40ba-bb7d-92295633798e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Neutron deleted interface fbe70abb-a696-4530-893c-079aa3168dc7; detaching it from the instance and deleting it from the info cache [ 2005.897956] env[63241]: DEBUG nova.network.neutron [req-47a524b1-df3f-4cda-95b8-ed808bbca2e4 req-743a6db3-3e48-40ba-bb7d-92295633798e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.009802] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 98e7f776-b36e-4132-803e-f2272e26c44e] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2006.011677] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821368, 'name': RelocateVM_Task, 'duration_secs': 0.343316} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.012144] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2006.012344] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377237', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'name': 'volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e2aea319-280e-4dc8-9c90-f080cdf2a08a', 'attached_at': '', 'detached_at': '', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'serial': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2006.013147] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1260644-a9a3-4fc2-849e-a6231fb9afd7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.031255] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232f0f5f-c4ce-418d-83b0-a607500aa8cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.053995] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e/volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2006.056066] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a11ff70-7ece-4197-aed8-b3ba1c9dea8d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.076245] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2006.076245] env[63241]: value = "task-1821369" [ 2006.076245] env[63241]: _type = "Task" [ 2006.076245] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.085093] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821369, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.376413] env[63241]: DEBUG nova.network.neutron [-] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.400598] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7be05e2-6a22-4f31-a360-ad1bb23dd20a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.411334] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b60086-4c28-429b-9d1f-818eb03bef79 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.443405] env[63241]: DEBUG nova.compute.manager [req-47a524b1-df3f-4cda-95b8-ed808bbca2e4 req-743a6db3-3e48-40ba-bb7d-92295633798e service nova] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Detach interface failed, port_id=fbe70abb-a696-4530-893c-079aa3168dc7, reason: Instance 215f658f-2af6-4525-b94c-489ad794e6f7 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2006.512790] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: c0ea8cf6-4023-4093-b0bc-67b02604125a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2006.586174] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821369, 'name': ReconfigVM_Task, 'duration_secs': 0.271636} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.586346] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Reconfigured VM instance instance-00000077 to attach disk [datastore1] volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e/volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2006.591063] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5dae9121-853d-457d-92c1-64eb817c95ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.607181] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2006.607181] env[63241]: value = "task-1821370" [ 2006.607181] env[63241]: _type = "Task" [ 2006.607181] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.615781] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821370, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.879421] env[63241]: INFO nova.compute.manager [-] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Took 1.53 seconds to deallocate network for instance. [ 2007.015953] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 35f0c615-3e10-4bdf-aa8d-181f72c1c699] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2007.118055] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821370, 'name': ReconfigVM_Task, 'duration_secs': 0.13802} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.118192] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377237', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'name': 'volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e2aea319-280e-4dc8-9c90-f080cdf2a08a', 'attached_at': '', 'detached_at': '', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'serial': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2007.118830] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03d39d1c-9f73-4c3e-b216-7a193eeaa928 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.126168] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2007.126168] env[63241]: value = "task-1821371" [ 2007.126168] env[63241]: _type = "Task" [ 2007.126168] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.134823] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821371, 'name': Rename_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.386418] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.386826] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.387216] env[63241]: DEBUG nova.objects.instance [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'resources' on Instance uuid 215f658f-2af6-4525-b94c-489ad794e6f7 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2007.519328] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 426b001f-949f-4814-9c10-c7f44b6da44a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2007.636905] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821371, 'name': Rename_Task, 'duration_secs': 0.133873} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.637180] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2007.637425] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61abfdce-a27e-440e-b893-4414bec620d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.644532] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2007.644532] env[63241]: value = "task-1821372" [ 2007.644532] env[63241]: _type = "Task" [ 2007.644532] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.652164] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821372, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.989551] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3568b934-755f-4c5a-87ff-7be4e2afbd35 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.998033] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5357f9f1-1136-463a-a809-4f2d34f3f2d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.028311] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 6f3cbd23-30b9-4502-be07-2edd0a701291] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2008.031065] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63ee7f0-3ac4-4e4a-bed5-648790ce6f2a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.040316] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef6e46d-59e8-48d8-b6ae-4e4862fc80e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.055015] env[63241]: DEBUG nova.compute.provider_tree [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2008.154783] env[63241]: DEBUG oslo_vmware.api [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821372, 'name': PowerOnVM_Task, 'duration_secs': 0.426044} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.155049] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2008.155270] env[63241]: INFO nova.compute.manager [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Took 9.04 seconds to spawn the instance on the hypervisor. 
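For reference, the spawn sequence above (CreateVM_Task, RelocateVM_Task for the root volume, ReconfigVM_Task to attach the VMDK, Rename_Task, PowerOnVM_Task) follows one pattern: invoke a vCenter *_Task method through the API session, then poll the task until vCenter reports success, which is what produces the repeated "Task: {...} progress is N%" and "completed successfully" entries. A minimal sketch of that invoke-and-poll pattern using oslo.vmware directly is below; the endpoint, credentials and managed-object value are illustrative assumptions, not values taken from this deployment.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Hypothetical vCenter endpoint and service credentials.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'svc-nova', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for a VM; the value is a placeholder.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Kick off the server-side task and block until it finishes.
    # wait_for_task() is what emits the "progress is N%" polling lines seen
    # in this log and raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)

This is roughly what the nova.virt.vmwareapi.vm_util power_on/power_off helpers referenced in the entries above do under the hood, with the driver supplying the session and the VM reference for the instance.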
[ 2008.155451] env[63241]: DEBUG nova.compute.manager [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2008.156269] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ed3474-e66e-4c8d-8ffc-9bad28e82826 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.535238] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 779d2380-be6c-4fdb-8755-10e99f8a6fd9] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2008.558018] env[63241]: DEBUG nova.scheduler.client.report [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2008.672825] env[63241]: INFO nova.compute.manager [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Took 15.49 seconds to build instance. 
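The "Acquiring lock ... by ...", "acquired ... :: waited N s" and ""released" ... :: held N s" entries that bracket the terminate, resource-tracker and cache-refresh work in this log come from oslo_concurrency.lockutils, which Nova uses both as a decorator and as a context manager. A minimal sketch of both forms follows; the lock names are hypothetical stand-ins for the ones seen in the surrounding entries.

    from oslo_concurrency import lockutils

    # Decorator form: every call to the wrapped function is serialized on the
    # named lock, producing the "acquired ... waited" / "released ... held"
    # DEBUG lines (lockutils inner()).
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section, e.g. adjusting tracked resource claims

    # Context-manager form, as used for the "refresh_cache-<instance uuid>" locks.
    with lockutils.lock('refresh_cache-example-uuid'):
        pass  # refresh the instance network info cache while holding the lock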
[ 2009.038567] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 6055a56d-1e0d-47bc-930b-b62206a0263e] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2009.062537] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.090032] env[63241]: INFO nova.scheduler.client.report [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleted allocations for instance 215f658f-2af6-4525-b94c-489ad794e6f7 [ 2009.175041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c1fff250-cdcd-48a0-8814-bd8e944c980f tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.542402] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: d7d5b5a1-bfe9-43a1-b8f1-0a0048562530] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2009.597358] env[63241]: DEBUG oslo_concurrency.lockutils [None req-de69a75b-72d7-45cd-91d3-f374bfe60902 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "215f658f-2af6-4525-b94c-489ad794e6f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.546s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.036328] env[63241]: DEBUG nova.compute.manager [req-8a84b75c-28c2-4f0c-beb9-ada22cd6c048 req-58ba333a-ccad-4684-bafa-b4a2745e27c5 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Received event network-changed-6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2010.036328] env[63241]: DEBUG nova.compute.manager [req-8a84b75c-28c2-4f0c-beb9-ada22cd6c048 req-58ba333a-ccad-4684-bafa-b4a2745e27c5 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Refreshing instance network info cache due to event network-changed-6bc11935-f0d5-456c-b815-ea415689a621. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2010.036457] env[63241]: DEBUG oslo_concurrency.lockutils [req-8a84b75c-28c2-4f0c-beb9-ada22cd6c048 req-58ba333a-ccad-4684-bafa-b4a2745e27c5 service nova] Acquiring lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.036599] env[63241]: DEBUG oslo_concurrency.lockutils [req-8a84b75c-28c2-4f0c-beb9-ada22cd6c048 req-58ba333a-ccad-4684-bafa-b4a2745e27c5 service nova] Acquired lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.036789] env[63241]: DEBUG nova.network.neutron [req-8a84b75c-28c2-4f0c-beb9-ada22cd6c048 req-58ba333a-ccad-4684-bafa-b4a2745e27c5 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Refreshing network info cache for port 6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2010.045051] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: d1abe122-0259-4f6e-b363-d7c0b1ae7a69] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2010.547563] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 49d350ff-4932-4759-a3fa-53274c484ae6] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2010.876829] env[63241]: DEBUG nova.network.neutron [req-8a84b75c-28c2-4f0c-beb9-ada22cd6c048 req-58ba333a-ccad-4684-bafa-b4a2745e27c5 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updated VIF entry in instance network info cache for port 6bc11935-f0d5-456c-b815-ea415689a621. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2010.876829] env[63241]: DEBUG nova.network.neutron [req-8a84b75c-28c2-4f0c-beb9-ada22cd6c048 req-58ba333a-ccad-4684-bafa-b4a2745e27c5 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updating instance_info_cache with network_info: [{"id": "6bc11935-f0d5-456c-b815-ea415689a621", "address": "fa:16:3e:de:e7:97", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc11935-f0", "ovs_interfaceid": "6bc11935-f0d5-456c-b815-ea415689a621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.051150] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 12b99b2b-56f0-4ce9-8897-f429c2084f38] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2011.379900] env[63241]: DEBUG oslo_concurrency.lockutils [req-8a84b75c-28c2-4f0c-beb9-ada22cd6c048 req-58ba333a-ccad-4684-bafa-b4a2745e27c5 service nova] Releasing lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.556873] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 91b65576-47be-4a92-a6fd-8380532c8e1d] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2011.560312] env[63241]: DEBUG nova.compute.manager [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Stashing vm_state: active {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2012.060748] env[63241]: DEBUG nova.compute.manager [req-4c62b39b-044a-4842-b4f5-76b92c7f316a req-ca80655e-f60f-426d-9a6c-9696f4661e1f service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Received event network-changed-b4f2cf51-6a1b-455b-a0dc-72616da10ed0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2012.060995] env[63241]: DEBUG nova.compute.manager [req-4c62b39b-044a-4842-b4f5-76b92c7f316a req-ca80655e-f60f-426d-9a6c-9696f4661e1f service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Refreshing instance network info cache due to event network-changed-b4f2cf51-6a1b-455b-a0dc-72616da10ed0. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2012.061196] env[63241]: DEBUG oslo_concurrency.lockutils [req-4c62b39b-044a-4842-b4f5-76b92c7f316a req-ca80655e-f60f-426d-9a6c-9696f4661e1f service nova] Acquiring lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.061340] env[63241]: DEBUG oslo_concurrency.lockutils [req-4c62b39b-044a-4842-b4f5-76b92c7f316a req-ca80655e-f60f-426d-9a6c-9696f4661e1f service nova] Acquired lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.061503] env[63241]: DEBUG nova.network.neutron [req-4c62b39b-044a-4842-b4f5-76b92c7f316a req-ca80655e-f60f-426d-9a6c-9696f4661e1f service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Refreshing network info cache for port b4f2cf51-6a1b-455b-a0dc-72616da10ed0 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2012.062713] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 6b96988b-cc79-41d7-a17d-277ae5aeb4dc] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2012.076780] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.077083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.311245] env[63241]: DEBUG nova.compute.manager [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Stashing vm_state: active {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2012.567865] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 46ac69f3-375c-4b60-bc33-83ad8577c4fb] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2012.581556] env[63241]: INFO nova.compute.claims [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2012.789986] env[63241]: DEBUG nova.network.neutron [req-4c62b39b-044a-4842-b4f5-76b92c7f316a req-ca80655e-f60f-426d-9a6c-9696f4661e1f service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updated VIF entry in instance network info cache for port b4f2cf51-6a1b-455b-a0dc-72616da10ed0. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2012.790407] env[63241]: DEBUG nova.network.neutron [req-4c62b39b-044a-4842-b4f5-76b92c7f316a req-ca80655e-f60f-426d-9a6c-9696f4661e1f service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance_info_cache with network_info: [{"id": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "address": "fa:16:3e:c2:2a:0b", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4f2cf51-6a", "ovs_interfaceid": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.830763] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.071274] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 037f539f-1bf1-4897-81b3-08c377b92211] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2013.087535] env[63241]: INFO nova.compute.resource_tracker [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating resource usage from migration 270ab512-ba77-4b68-a65a-78ce58c70d25 [ 2013.181213] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064cc67b-12a7-4e1b-b7c7-44e4e0cc768a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.189255] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8437cf-1429-409d-8949-9d239c070f23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.219556] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b75190b-9eee-4890-9dc4-31f1437048e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.227119] 
env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aea4c85-31eb-4db7-84e7-2260d487e86d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.240232] env[63241]: DEBUG nova.compute.provider_tree [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2013.293276] env[63241]: DEBUG oslo_concurrency.lockutils [req-4c62b39b-044a-4842-b4f5-76b92c7f316a req-ca80655e-f60f-426d-9a6c-9696f4661e1f service nova] Releasing lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.574385] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e62f49f0-370d-4b5d-ab43-72e0e6238432] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2013.743446] env[63241]: DEBUG nova.scheduler.client.report [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2014.077716] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: b6d32bfb-f4c8-41f2-abd0-0565dd3d2f1e] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2014.248938] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.172s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.249205] env[63241]: INFO nova.compute.manager [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Migrating [ 2014.256000] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.425s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.580713] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 
6b4debb5-5a83-45f7-bcf2-36a10f95f644] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2014.767709] env[63241]: INFO nova.compute.claims [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2014.773012] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.773268] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.773501] env[63241]: DEBUG nova.network.neutron [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2015.083970] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: b7378019-a572-4d4d-a82d-cee13a1b6a88] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2015.277476] env[63241]: INFO nova.compute.resource_tracker [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating resource usage from migration 328dbc22-b6c0-4e90-bc54-27344128ce3b [ 2015.385448] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0831380d-3e0b-4474-9728-7f536845d068 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.395012] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6731b1de-bbfe-4e06-a382-7a49a63b335b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.427333] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7704e6b5-c5c6-4101-936d-791e0af48ddf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.434885] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cda8a7-efa9-4944-b330-5cfd8a145d2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.447966] env[63241]: DEBUG nova.compute.provider_tree [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not 
changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2015.521580] env[63241]: DEBUG nova.network.neutron [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance_info_cache with network_info: [{"id": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "address": "fa:16:3e:c2:2a:0b", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4f2cf51-6a", "ovs_interfaceid": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.587237] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 864175e0-33f0-429f-bdf6-722d9b00da2b] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2015.951782] env[63241]: DEBUG nova.scheduler.client.report [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2016.023942] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.089985] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 01af6dc5-e0e7-4f8b-ad07-73af80c32577] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2016.456953] env[63241]: DEBUG 
oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.201s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.457242] env[63241]: INFO nova.compute.manager [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Migrating [ 2016.593306] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 31998a62-70f5-4205-89b9-df8312916126] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2016.974050] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.974050] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.974050] env[63241]: DEBUG nova.network.neutron [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2017.096710] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: f65e5b00-38b5-4453-b370-1f56f18053eb] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2017.537581] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff388ba-43ec-44ec-bdf9-b2930fb556b8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.557546] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance 'e2aea319-280e-4dc8-9c90-f080cdf2a08a' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2017.600313] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 381bba62-49a7-4d6f-b12a-741f5d884fe5] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2017.692375] env[63241]: DEBUG nova.network.neutron [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 
e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2018.063808] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2018.064326] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fce07e1-8b33-420a-bc7e-dd71d56e6ff4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.072294] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2018.072294] env[63241]: value = "task-1821373" [ 2018.072294] env[63241]: _type = "Task" [ 2018.072294] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.081517] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821373, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.107368] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 44ddb1f0-fd5c-4c9e-baf2-eec09d80f490] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2018.195429] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.582090] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821373, 'name': PowerOffVM_Task, 'duration_secs': 0.191542} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.582332] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2018.582500] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance 'e2aea319-280e-4dc8-9c90-f080cdf2a08a' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2018.610735] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: a77f7227-0285-48b8-bb3b-f5cfe7ad4646] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2019.089507] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2019.089816] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2019.090098] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 
tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2019.090259] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2019.090602] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2019.090735] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2019.090866] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2019.091045] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2019.091227] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2019.091394] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2019.091568] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2019.096879] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d49f29c-f4d8-4a8f-ae34-bd8ea8944e33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.112954] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2019.112954] env[63241]: value = "task-1821374" [ 2019.112954] env[63241]: _type 
= "Task" [ 2019.112954] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.113463] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 73ea6bff-60da-4691-a569-f4e9ae92f701] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2019.124289] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821374, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.619301] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: f372d405-f7d5-4e5f-8c36-fe9651af2a0d] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2019.624102] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821374, 'name': ReconfigVM_Task, 'duration_secs': 0.310897} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.624583] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance 'e2aea319-280e-4dc8-9c90-f080cdf2a08a' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2019.709013] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4caf87f-5a08-4ca0-bf6a-4d7eb3eeb492 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.727547] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance 'e28ba013-0bc5-4edc-858d-674980bc8742' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2020.125064] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 1e172f73-972e-4401-b358-512f7e03b27f] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2020.130113] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2020.130348] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2020.130497] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2020.130679] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2020.130824] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2020.130970] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2020.131189] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2020.131353] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2020.131518] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2020.131679] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2020.131850] env[63241]: DEBUG nova.virt.hardware [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Sorted desired 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2020.137185] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2020.137446] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6463ab5-2d6a-436f-9735-599694b011dc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.157322] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2020.157322] env[63241]: value = "task-1821375" [ 2020.157322] env[63241]: _type = "Task" [ 2020.157322] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.165290] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821375, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.234031] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2020.234399] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70040ca4-28c9-41e9-b16a-d02bab3140f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.242426] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2020.242426] env[63241]: value = "task-1821376" [ 2020.242426] env[63241]: _type = "Task" [ 2020.242426] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.256445] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821376, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.629070] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: c8f1ce16-70b7-41fd-8516-63198139c1cc] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2020.667283] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821375, 'name': ReconfigVM_Task, 'duration_secs': 0.159996} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.667566] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2020.668405] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ed560a-3ec7-4a69-9089-066282c16705 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.691175] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e/volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2020.691696] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee65db14-f833-4d51-bbf7-9eba5c40e52b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.708821] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2020.708821] env[63241]: value = "task-1821377" [ 2020.708821] env[63241]: _type = "Task" [ 2020.708821] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.716393] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821377, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.751161] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821376, 'name': PowerOffVM_Task, 'duration_secs': 0.434051} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.751446] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2020.751647] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance 'e28ba013-0bc5-4edc-858d-674980bc8742' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2021.131975] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0e4a3b3a-4464-404f-9154-1ab6f97ae951] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2021.218403] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821377, 'name': ReconfigVM_Task, 'duration_secs': 0.263898} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.218671] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Reconfigured VM instance instance-00000077 to attach disk [datastore1] volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e/volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2021.218936] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance 'e2aea319-280e-4dc8-9c90-f080cdf2a08a' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2021.257484] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2021.257709] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b 
tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2021.257889] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2021.258059] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2021.258212] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2021.258360] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2021.258577] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2021.258736] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2021.258902] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2021.259081] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2021.259260] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2021.264683] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96cf2c92-50f5-40e1-a0eb-3205248775a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.280093] env[63241]: DEBUG 
oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2021.280093] env[63241]: value = "task-1821378" [ 2021.280093] env[63241]: _type = "Task" [ 2021.280093] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.288073] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821378, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.635378] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 1dc98fbd-a52b-42fa-8d37-d14318dbc941] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2021.726257] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d8334d-e1fc-4686-aeaf-88d7de606b28 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.747346] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb24802a-5cfe-4a79-a3f4-19f3ea7a20e5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.764580] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance 'e2aea319-280e-4dc8-9c90-f080cdf2a08a' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2021.789230] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821378, 'name': ReconfigVM_Task, 'duration_secs': 0.174277} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.789811] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance 'e28ba013-0bc5-4edc-858d-674980bc8742' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2022.139111] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: f8a334ae-9bc4-4b7f-a49a-8a99a1fb0067] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2022.295585] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2022.295852] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2022.296026] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2022.296349] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2022.296390] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2022.296537] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2022.296746] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2022.296903] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2022.297102] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2022.297284] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2022.297459] env[63241]: DEBUG nova.virt.hardware [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2022.302716] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2022.302990] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c29900bc-2294-41ca-a902-08b780b2a3b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.321921] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2022.321921] env[63241]: value = "task-1821379" [ 2022.321921] env[63241]: _type = "Task" [ 2022.321921] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.334240] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821379, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.642867] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 7f1710d0-857d-41fc-8151-8c5e129dda08] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2022.834246] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821379, 'name': ReconfigVM_Task, 'duration_secs': 0.160112} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.834502] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2022.835260] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1e9ad5-ef41-4b1f-b008-e3de0f7d68c7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.856642] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] e28ba013-0bc5-4edc-858d-674980bc8742/e28ba013-0bc5-4edc-858d-674980bc8742.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2022.856863] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21f1fa41-bac9-44ef-8f81-db01650f7f9f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.875972] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2022.875972] env[63241]: value = "task-1821380" [ 2022.875972] env[63241]: _type = "Task" [ 2022.875972] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.883197] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821380, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.146251] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 29b6caa8-a07c-494b-b776-b08affa45c87] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2023.385447] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821380, 'name': ReconfigVM_Task, 'duration_secs': 0.253513} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.385741] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Reconfigured VM instance instance-0000005c to attach disk [datastore1] e28ba013-0bc5-4edc-858d-674980bc8742/e28ba013-0bc5-4edc-858d-674980bc8742.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2023.385998] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance 'e28ba013-0bc5-4edc-858d-674980bc8742' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2023.404739] env[63241]: DEBUG nova.network.neutron [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Port b4f2cf51-6a1b-455b-a0dc-72616da10ed0 binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2023.649658] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: cb7eb689-b8f6-479d-aa6b-c27fab16e131] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2023.892522] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf6353d-093b-4015-af76-cd54ffebc340 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.915313] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db170190-0325-45fb-9480-32c7242ad314 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.933423] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance 'e28ba013-0bc5-4edc-858d-674980bc8742' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2024.153014] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 
14af9f82-525e-453c-8dc5-ef5b13c67ee4] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2024.432698] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.432698] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.432958] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.470376] env[63241]: DEBUG nova.network.neutron [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Port 6be29b8b-a0d6-4346-b774-5faf878f177c binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2024.655871] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: fb5d60fa-fa13-44a1-8291-4645761a0c80] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2025.161236] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 9d301157-6870-4452-9ae6-0d45c4338886] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2025.492022] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "e28ba013-0bc5-4edc-858d-674980bc8742-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.492418] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.492807] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.494433] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2025.495467] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2025.495467] env[63241]: DEBUG nova.network.neutron [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2025.496338] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.496543] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.496737] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.497083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.497083] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e433467f-d6b7-4827-8f33-6a36c361b2be 
tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.499044] env[63241]: INFO nova.compute.manager [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Terminating instance [ 2025.500589] env[63241]: DEBUG nova.compute.manager [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2025.500787] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2025.501624] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa903aab-462b-43d6-9650-dfbd534d7fa9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.510503] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2025.510751] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac13b071-8020-4fcd-8670-33a3c48b6469 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.517620] env[63241]: DEBUG oslo_vmware.api [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 2025.517620] env[63241]: value = "task-1821381" [ 2025.517620] env[63241]: _type = "Task" [ 2025.517620] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.526418] env[63241]: DEBUG oslo_vmware.api [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821381, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.664496] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e3842404-2c80-4fa9-b0c9-c58c484845a2] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2025.993472] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.993735] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.028451] env[63241]: DEBUG oslo_vmware.api [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821381, 'name': PowerOffVM_Task, 'duration_secs': 0.225264} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.030764] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2026.030954] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2026.031219] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1decd038-5b1e-4b6f-bb43-4a33d08776d0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.168677] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: c7b034f7-1d7f-4782-9ecb-5987c35339cc] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2026.262409] env[63241]: DEBUG nova.network.neutron [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance_info_cache with network_info: [{"id": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "address": "fa:16:3e:c2:2a:0b", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4f2cf51-6a", "ovs_interfaceid": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2026.497030] env[63241]: DEBUG nova.compute.utils [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2026.531228] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2026.531405] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2026.531671] env[63241]: DEBUG nova.network.neutron [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2026.672228] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 965f3d8a-7a8e-464e-b117-ce1ca8e8a0ce] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2026.765216] env[63241]: DEBUG oslo_concurrency.lockutils [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2027.000590] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 
1.007s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2027.175285] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 9361ee6a-7c4d-4409-bc3c-7da7d4550d97] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2027.224712] env[63241]: DEBUG nova.network.neutron [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.273738] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196e2ad4-24a9-418d-9ae8-dfcadd9bdfa2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.280826] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d450caa0-ce44-484f-b1bf-a3f375d40035 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.678266] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 72a11582-1fad-428a-bde1-e9d0b05731cd] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2027.727108] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2028.057123] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.057447] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.057705] env[63241]: INFO nova.compute.manager [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Attaching volume c5e95465-81bd-4398-9449-346bdd72ba65 to /dev/sdb [ 2028.086407] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a54790-fdcc-4354-89f6-e3accb0d91c7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.093079] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209bd010-391f-411c-bfb2-28b7a35ba2d1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.106907] env[63241]: DEBUG nova.virt.block_device [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Updating existing volume attachment record: d4397cd2-16bf-4f15-8930-4f08b608cd70 {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2028.181993] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 94a604da-ad3d-415a-aa92-d648e3da803d] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2028.278053] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68354958-f845-4e4d-9198-f2ca5acac7c6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.297533] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec18b08-502b-42ca-9194-07e40d8ee909 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.303846] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance 'e28ba013-0bc5-4edc-858d-674980bc8742' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2028.368664] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f25d3b-4512-4f0e-a067-37acdaf49998 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.388547] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-016300e6-cfc3-4373-ad09-35c1beed0c8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.395735] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance 'e2aea319-280e-4dc8-9c90-f080cdf2a08a' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2028.686400] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2028.686586] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances with incomplete migration {{(pid=63241) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2028.810569] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2028.810874] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43d83fb4-657a-4191-a949-bccfeed5fe18 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.817939] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2028.817939] env[63241]: value = "task-1821384" [ 2028.817939] env[63241]: _type = "Task" [ 2028.817939] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.826146] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821384, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.902301] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2028.902675] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f7dd97b-870a-4fbd-b8c6-5f8c6be2c948 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.909719] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2028.909719] env[63241]: value = "task-1821385" [ 2028.909719] env[63241]: _type = "Task" [ 2028.909719] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.925669] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821385, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.190189] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2029.329376] env[63241]: DEBUG oslo_vmware.api [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821384, 'name': PowerOnVM_Task, 'duration_secs': 0.364435} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.329658] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2029.329844] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a537644a-a8a0-42a6-947d-91f0edd45e1b tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance 'e28ba013-0bc5-4edc-858d-674980bc8742' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2029.419460] env[63241]: DEBUG oslo_vmware.api [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821385, 'name': PowerOnVM_Task, 'duration_secs': 0.380616} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.419674] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2029.419855] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-39d5cb75-be67-413c-ba18-0de502daf195 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance 'e2aea319-280e-4dc8-9c90-f080cdf2a08a' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2030.174623] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.677632] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.677882] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.678064] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.678244] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2030.679167] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb693948-58be-41c9-9a87-5b11a84d3f33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.688276] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babadfa8-ebc4-48e1-a990-7778fb7b5130 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.701935] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0ba4e7-e908-4723-819f-31d20070d21b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.708147] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2b35c2-44a6-4c83-a50c-508161239ec1 {{(pid=63241) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.737088] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179668MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2030.737230] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.737440] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.423579] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2031.423803] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2031.423997] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleting the datastore file [datastore1] e4514260-dfcc-45a3-80d5-b5484b0b599c {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2031.424342] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03ca822b-e28a-4b85-86d1-cb57aa20d504 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.431829] env[63241]: DEBUG oslo_vmware.api [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for the task: (returnval){ [ 2031.431829] env[63241]: value = "task-1821387" [ 2031.431829] env[63241]: _type = "Task" [ 2031.431829] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.440347] env[63241]: DEBUG oslo_vmware.api [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821387, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.662761] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "e28ba013-0bc5-4edc-858d-674980bc8742" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.662994] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.663211] env[63241]: DEBUG nova.compute.manager [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Going to confirm migration 7 {{(pid=63241) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2031.745601] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Applying migration context for instance e28ba013-0bc5-4edc-858d-674980bc8742 as it has an incoming, in-progress migration 328dbc22-b6c0-4e90-bc54-27344128ce3b. Migration status is confirming {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2031.745916] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Applying migration context for instance e2aea319-280e-4dc8-9c90-f080cdf2a08a as it has an incoming, in-progress migration 270ab512-ba77-4b68-a65a-78ce58c70d25. Migration status is finished {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2031.746900] env[63241]: INFO nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating resource usage from migration 328dbc22-b6c0-4e90-bc54-27344128ce3b [ 2031.747232] env[63241]: INFO nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating resource usage from migration 270ab512-ba77-4b68-a65a-78ce58c70d25 [ 2031.766540] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 31e84206-e583-4610-969e-2ccae2d0b206 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2031.766702] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 43684f7f-0a5d-48e5-8ab6-573db8d81ff0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2031.766828] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e4514260-dfcc-45a3-80d5-b5484b0b599c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2031.766947] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2031.767087] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Migration 270ab512-ba77-4b68-a65a-78ce58c70d25 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2031.767209] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e2aea319-280e-4dc8-9c90-f080cdf2a08a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2031.767327] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Migration 328dbc22-b6c0-4e90-bc54-27344128ce3b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2031.767443] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e28ba013-0bc5-4edc-858d-674980bc8742 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2031.767638] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2031.767776] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2176MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2031.869852] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b6e837-3456-4f51-a1ed-53074c396c50 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.877489] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8bb2bc-27cf-4414-a0f8-d074b3dd05e7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.907782] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08a371e-904a-400d-acca-17502b055231 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.915091] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f18148-6f00-4005-8a80-a4e0860f9e1b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.927974] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2031.940932] env[63241]: DEBUG oslo_vmware.api [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Task: {'id': task-1821387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158849} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.941192] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2031.941386] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2031.941571] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2031.941752] env[63241]: INFO nova.compute.manager [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Took 6.44 seconds to destroy the instance on the hypervisor. [ 2031.941990] env[63241]: DEBUG oslo.service.loopingcall [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2031.942192] env[63241]: DEBUG nova.compute.manager [-] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2031.942295] env[63241]: DEBUG nova.network.neutron [-] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2031.952859] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.953359] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.953545] env[63241]: DEBUG nova.compute.manager [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Going to confirm migration 6 {{(pid=63241) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2032.201066] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2032.201267] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2032.201447] env[63241]: DEBUG nova.network.neutron [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2032.201638] env[63241]: DEBUG nova.objects.instance [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'info_cache' on Instance uuid e28ba013-0bc5-4edc-858d-674980bc8742 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2032.412025] env[63241]: DEBUG nova.compute.manager [req-18997ed4-1329-4b79-b4bd-af0ce4173fae req-4b93bcc5-03fb-4aca-a4af-97c11d0e0781 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Received event 
network-vif-deleted-249e56d5-0dc5-4bab-9179-ca69f7024104 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2032.412242] env[63241]: INFO nova.compute.manager [req-18997ed4-1329-4b79-b4bd-af0ce4173fae req-4b93bcc5-03fb-4aca-a4af-97c11d0e0781 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Neutron deleted interface 249e56d5-0dc5-4bab-9179-ca69f7024104; detaching it from the instance and deleting it from the info cache [ 2032.412399] env[63241]: DEBUG nova.network.neutron [req-18997ed4-1329-4b79-b4bd-af0ce4173fae req-4b93bcc5-03fb-4aca-a4af-97c11d0e0781 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2032.431096] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2032.489630] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2032.489630] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquired lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2032.489820] env[63241]: DEBUG nova.network.neutron [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2032.489984] env[63241]: DEBUG nova.objects.instance [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'info_cache' on Instance uuid e2aea319-280e-4dc8-9c90-f080cdf2a08a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2032.648967] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2032.648967] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377240', 'volume_id': 'c5e95465-81bd-4398-9449-346bdd72ba65', 'name': 'volume-c5e95465-81bd-4398-9449-346bdd72ba65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59', 'attached_at': '', 'detached_at': '', 'volume_id': 'c5e95465-81bd-4398-9449-346bdd72ba65', 'serial': 'c5e95465-81bd-4398-9449-346bdd72ba65'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2032.649918] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80941713-42f9-4f81-956e-a0e21d235175 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.667534] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a0f8d8-aff9-4dd5-837e-93b85e1fdb70 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.692256] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] volume-c5e95465-81bd-4398-9449-346bdd72ba65/volume-c5e95465-81bd-4398-9449-346bdd72ba65.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2032.692509] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d789d98-d012-4815-81bc-f0f0ccb11e33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.712698] env[63241]: DEBUG oslo_vmware.api [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 2032.712698] env[63241]: value = "task-1821388" [ 2032.712698] env[63241]: _type = "Task" [ 2032.712698] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.721494] env[63241]: DEBUG oslo_vmware.api [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821388, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.892038] env[63241]: DEBUG nova.network.neutron [-] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2032.914602] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-178aa0c6-602b-4bf8-a128-4bafbcfd380e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.924095] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0685fdf-7014-4d3e-8933-2d6d5974a2b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.935356] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2032.935532] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.198s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.953168] env[63241]: DEBUG nova.compute.manager [req-18997ed4-1329-4b79-b4bd-af0ce4173fae req-4b93bcc5-03fb-4aca-a4af-97c11d0e0781 service nova] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Detach interface failed, port_id=249e56d5-0dc5-4bab-9179-ca69f7024104, reason: Instance e4514260-dfcc-45a3-80d5-b5484b0b599c could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2033.222528] env[63241]: DEBUG oslo_vmware.api [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821388, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.394949] env[63241]: INFO nova.compute.manager [-] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Took 1.45 seconds to deallocate network for instance. 
[ 2033.410172] env[63241]: DEBUG nova.network.neutron [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [{"id": "6be29b8b-a0d6-4346-b774-5faf878f177c", "address": "fa:16:3e:0f:60:27", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6be29b8b-a0", "ovs_interfaceid": "6be29b8b-a0d6-4346-b774-5faf878f177c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.692160] env[63241]: DEBUG nova.network.neutron [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance_info_cache with network_info: [{"id": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "address": "fa:16:3e:c2:2a:0b", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4f2cf51-6a", "ovs_interfaceid": "b4f2cf51-6a1b-455b-a0dc-72616da10ed0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.722722] env[63241]: DEBUG oslo_vmware.api [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821388, 'name': ReconfigVM_Task, 
'duration_secs': 0.559531} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.723011] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Reconfigured VM instance instance-00000076 to attach disk [datastore1] volume-c5e95465-81bd-4398-9449-346bdd72ba65/volume-c5e95465-81bd-4398-9449-346bdd72ba65.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2033.727990] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f214cff5-b164-45a6-a1ec-9a685496ee2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.743243] env[63241]: DEBUG oslo_vmware.api [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 2033.743243] env[63241]: value = "task-1821389" [ 2033.743243] env[63241]: _type = "Task" [ 2033.743243] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.752468] env[63241]: DEBUG oslo_vmware.api [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821389, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.901810] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.902157] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.902334] env[63241]: DEBUG nova.objects.instance [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lazy-loading 'resources' on Instance uuid e4514260-dfcc-45a3-80d5-b5484b0b599c {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2033.911389] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-e28ba013-0bc5-4edc-858d-674980bc8742" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.911614] env[63241]: DEBUG nova.objects.instance [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 
tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'migration_context' on Instance uuid e28ba013-0bc5-4edc-858d-674980bc8742 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2034.195708] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Releasing lock "refresh_cache-e2aea319-280e-4dc8-9c90-f080cdf2a08a" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.195967] env[63241]: DEBUG nova.objects.instance [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'migration_context' on Instance uuid e2aea319-280e-4dc8-9c90-f080cdf2a08a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2034.253054] env[63241]: DEBUG oslo_vmware.api [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821389, 'name': ReconfigVM_Task, 'duration_secs': 0.14067} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.253372] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377240', 'volume_id': 'c5e95465-81bd-4398-9449-346bdd72ba65', 'name': 'volume-c5e95465-81bd-4398-9449-346bdd72ba65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59', 'attached_at': '', 'detached_at': '', 'volume_id': 'c5e95465-81bd-4398-9449-346bdd72ba65', 'serial': 'c5e95465-81bd-4398-9449-346bdd72ba65'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2034.414308] env[63241]: DEBUG nova.objects.base [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2034.415625] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012f8af2-07e8-4624-9187-1066d33f23e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.436341] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-897bc087-d8b2-4a87-ade0-cd1c42c58f58 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.441609] env[63241]: DEBUG oslo_vmware.api [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2034.441609] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b969d0-d5bc-e1d4-1d36-67c27dfd0488" [ 2034.441609] env[63241]: _type = "Task" [ 2034.441609] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.448893] env[63241]: DEBUG oslo_vmware.api [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b969d0-d5bc-e1d4-1d36-67c27dfd0488, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.513563] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ecc7cb-a605-456b-97c9-0cc729249206 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.520851] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1abe75-2f24-4310-87f9-5035cf9c2efe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.550159] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255fdb96-934f-4a0d-95d6-ebbea69f9518 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.557345] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2484f2-f3c9-43b2-9812-aa775c670475 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.571513] env[63241]: DEBUG nova.compute.provider_tree [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2034.698786] env[63241]: DEBUG nova.objects.base [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2034.699673] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0b5ac8-596d-40e9-8de9-8042fd10103e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.718868] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72628eee-8230-4e49-a603-ad839e30d2bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.724584] env[63241]: DEBUG oslo_vmware.api [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2034.724584] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5204eb47-a6ae-22e2-8651-f0b03020e2bf" [ 2034.724584] env[63241]: _type = "Task" [ 2034.724584] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.733513] env[63241]: DEBUG oslo_vmware.api [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5204eb47-a6ae-22e2-8651-f0b03020e2bf, 'name': SearchDatastore_Task, 'duration_secs': 0.00714} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.733746] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.953050] env[63241]: DEBUG oslo_vmware.api [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b969d0-d5bc-e1d4-1d36-67c27dfd0488, 'name': SearchDatastore_Task, 'duration_secs': 0.01588} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.953368] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.074085] env[63241]: DEBUG nova.scheduler.client.report [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2035.312616] env[63241]: DEBUG nova.objects.instance [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'flavor' on Instance uuid 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2035.542198] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.578901] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.581375] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.848s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.599863] env[63241]: INFO nova.scheduler.client.report [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Deleted allocations for instance e4514260-dfcc-45a3-80d5-b5484b0b599c [ 2035.818061] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e0b689f2-821e-440f-9b35-ed2bbafd1edc tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.760s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.818259] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.276s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.107828] env[63241]: DEBUG oslo_concurrency.lockutils [None req-e433467f-d6b7-4827-8f33-6a36c361b2be tempest-AttachVolumeShelveTestJSON-832129514 tempest-AttachVolumeShelveTestJSON-832129514-project-member] Lock "e4514260-dfcc-45a3-80d5-b5484b0b599c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.611s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.183875] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef6c0c1-89de-417a-940c-2ded3c564972 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.191751] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0956680-b79f-4a8d-aef8-4f336bea2dee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.222859] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bd048e-77ab-4310-8acc-23b5951b8f1c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.231239] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c04356-9e66-4100-8be6-4f7b122b73c6 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.243790] env[63241]: DEBUG nova.compute.provider_tree [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2036.321385] env[63241]: INFO nova.compute.manager [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Detaching volume c5e95465-81bd-4398-9449-346bdd72ba65 [ 2036.355059] env[63241]: INFO nova.virt.block_device [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Attempting to driver detach volume c5e95465-81bd-4398-9449-346bdd72ba65 from mountpoint /dev/sdb [ 2036.355383] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Volume detach. Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2036.355578] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377240', 'volume_id': 'c5e95465-81bd-4398-9449-346bdd72ba65', 'name': 'volume-c5e95465-81bd-4398-9449-346bdd72ba65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59', 'attached_at': '', 'detached_at': '', 'volume_id': 'c5e95465-81bd-4398-9449-346bdd72ba65', 'serial': 'c5e95465-81bd-4398-9449-346bdd72ba65'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2036.357008] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7213b8-f414-4418-a800-3fbb7b111d3d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.379674] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c42236b-65ea-485f-81ab-4bb58d8e4992 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.388632] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8103824-8ebe-4bdb-b394-150939e4fe8c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.412350] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e070f71-3974-4699-b0e1-1aea333114ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.428175] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] The volume has not been displaced from its original location: [datastore1] volume-c5e95465-81bd-4398-9449-346bdd72ba65/volume-c5e95465-81bd-4398-9449-346bdd72ba65.vmdk. No consolidation needed. {{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2036.434247] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Reconfiguring VM instance instance-00000076 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2036.434621] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d45bffe-6a6c-47e9-8a24-cc94fc420023 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.453783] env[63241]: DEBUG oslo_vmware.api [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 2036.453783] env[63241]: value = "task-1821390" [ 2036.453783] env[63241]: _type = "Task" [ 2036.453783] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.462049] env[63241]: DEBUG oslo_vmware.api [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821390, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.747689] env[63241]: DEBUG nova.scheduler.client.report [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2036.964407] env[63241]: DEBUG oslo_vmware.api [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821390, 'name': ReconfigVM_Task, 'duration_secs': 0.215152} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.964675] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Reconfigured VM instance instance-00000076 to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2036.971980] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-241abaec-8f40-4aff-a337-5e074da5db76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.987109] env[63241]: DEBUG oslo_vmware.api [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 2036.987109] env[63241]: value = "task-1821391" [ 2036.987109] env[63241]: _type = "Task" [ 2036.987109] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.995036] env[63241]: DEBUG oslo_vmware.api [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821391, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.496603] env[63241]: DEBUG oslo_vmware.api [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821391, 'name': ReconfigVM_Task, 'duration_secs': 0.388501} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.497116] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377240', 'volume_id': 'c5e95465-81bd-4398-9449-346bdd72ba65', 'name': 'volume-c5e95465-81bd-4398-9449-346bdd72ba65', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59', 'attached_at': '', 'detached_at': '', 'volume_id': 'c5e95465-81bd-4398-9449-346bdd72ba65', 'serial': 'c5e95465-81bd-4398-9449-346bdd72ba65'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2037.758039] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.176s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.761392] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.808s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.039894] env[63241]: DEBUG nova.objects.instance [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'flavor' on Instance uuid 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2038.308071] env[63241]: INFO nova.compute.manager [None req-d8c65a68-96ef-4f0e-9b24-fd8b58f8e071 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Get console output [ 2038.308319] env[63241]: WARNING nova.virt.vmwareapi.driver [None req-d8c65a68-96ef-4f0e-9b24-fd8b58f8e071 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] The console log is missing. 
Check your VSPC configuration [ 2038.316653] env[63241]: INFO nova.scheduler.client.report [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted allocation for migration 270ab512-ba77-4b68-a65a-78ce58c70d25 [ 2038.351506] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2c2dc1-ab5d-450d-98c6-3cf92e681dc6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.360406] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d4be80-2511-44f6-b819-c6bb5e47defd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.391686] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da479b2-80ea-4b9e-a61c-f0e26679fca5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.399034] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2fad68-6902-42a9-974b-ab68f0d74f7d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.412345] env[63241]: DEBUG nova.compute.provider_tree [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2038.822230] env[63241]: DEBUG oslo_concurrency.lockutils [None req-31ec3248-0281-40c7-b7d5-75ec6df013a2 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.869s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.915322] env[63241]: DEBUG nova.scheduler.client.report [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2039.048035] env[63241]: DEBUG oslo_concurrency.lockutils [None req-28ea5c92-a844-469a-9369-33e1038cdb91 tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.230s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.777366] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.777627] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.777838] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.778034] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.778205] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.780447] env[63241]: INFO nova.compute.manager [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Terminating instance [ 2039.782145] env[63241]: DEBUG nova.compute.manager [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2039.782347] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2039.783191] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0738ad-c41f-4827-89cb-e4e2274b575d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.791305] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2039.791513] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e183dff-3527-400d-a1f5-c56532605bd4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.797875] env[63241]: DEBUG oslo_vmware.api [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 2039.797875] env[63241]: value = "task-1821393" [ 2039.797875] env[63241]: _type = "Task" [ 2039.797875] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.805274] env[63241]: DEBUG oslo_vmware.api [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821393, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.927227] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.166s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.308571] env[63241]: DEBUG oslo_vmware.api [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821393, 'name': PowerOffVM_Task, 'duration_secs': 0.223911} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.308859] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2040.309045] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2040.309300] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe29473b-24f3-47d8-b53c-626814e22573 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.393851] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2040.394098] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2040.394288] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleting the datastore file [datastore1] 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2040.394551] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b7eafb7-da9e-45f0-82fa-73271ac683e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.400931] env[63241]: DEBUG oslo_vmware.api [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for the task: (returnval){ [ 2040.400931] env[63241]: value = "task-1821395" [ 2040.400931] env[63241]: _type = "Task" [ 2040.400931] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.408534] env[63241]: DEBUG oslo_vmware.api [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821395, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.492635] env[63241]: INFO nova.scheduler.client.report [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleted allocation for migration 328dbc22-b6c0-4e90-bc54-27344128ce3b [ 2040.911544] env[63241]: DEBUG oslo_vmware.api [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Task: {'id': task-1821395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155073} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.911822] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2040.912022] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2040.912212] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2040.912392] env[63241]: INFO nova.compute.manager [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2040.912679] env[63241]: DEBUG oslo.service.loopingcall [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2040.912883] env[63241]: DEBUG nova.compute.manager [-] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2040.912984] env[63241]: DEBUG nova.network.neutron [-] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2040.974191] env[63241]: DEBUG oslo_concurrency.lockutils [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "e28ba013-0bc5-4edc-858d-674980bc8742" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.998273] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9c15e765-3297-4c09-9285-a6195105cd12 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.335s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.999264] env[63241]: DEBUG oslo_concurrency.lockutils [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.025s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.999515] env[63241]: DEBUG oslo_concurrency.lockutils [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "e28ba013-0bc5-4edc-858d-674980bc8742-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.999721] env[63241]: DEBUG oslo_concurrency.lockutils [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.999881] env[63241]: DEBUG oslo_concurrency.lockutils [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.002348] env[63241]: INFO nova.compute.manager [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] 
Terminating instance [ 2041.003807] env[63241]: DEBUG nova.compute.manager [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2041.004082] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2041.004859] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133e2de5-6e3e-411a-98e0-5a2351faf3a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.013017] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2041.013150] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ee6fea9-b2ed-41b7-8777-1ceee6b43151 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.019945] env[63241]: DEBUG oslo_vmware.api [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2041.019945] env[63241]: value = "task-1821396" [ 2041.019945] env[63241]: _type = "Task" [ 2041.019945] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.028877] env[63241]: DEBUG oslo_vmware.api [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821396, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.531969] env[63241]: DEBUG oslo_vmware.api [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821396, 'name': PowerOffVM_Task, 'duration_secs': 0.207629} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.532332] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2041.535518] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2041.539977] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91b12d69-a5c8-439d-8f89-39c1ce6ea70d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.563245] env[63241]: DEBUG nova.compute.manager [req-7bb60696-013e-4d54-8f2d-9f58b590cfe9 req-e711601c-b055-44e4-bf65-1794c1c4aab0 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Received event network-vif-deleted-8611587f-7fb3-447f-81a7-8a4d5b83c554 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2041.563512] env[63241]: INFO nova.compute.manager [req-7bb60696-013e-4d54-8f2d-9f58b590cfe9 req-e711601c-b055-44e4-bf65-1794c1c4aab0 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Neutron deleted interface 8611587f-7fb3-447f-81a7-8a4d5b83c554; detaching it from the instance and deleting it from the info cache [ 2041.563731] env[63241]: DEBUG nova.network.neutron [req-7bb60696-013e-4d54-8f2d-9f58b590cfe9 req-e711601c-b055-44e4-bf65-1794c1c4aab0 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2041.690612] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2041.691437] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2041.691801] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleting the datastore file [datastore1] e28ba013-0bc5-4edc-858d-674980bc8742 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2041.692229] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d29c050a-8d14-4b32-942c-6a124d004eb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.699313] env[63241]: DEBUG oslo_vmware.api [None 
req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2041.699313] env[63241]: value = "task-1821398" [ 2041.699313] env[63241]: _type = "Task" [ 2041.699313] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.709144] env[63241]: DEBUG oslo_vmware.api [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821398, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.043437] env[63241]: DEBUG nova.network.neutron [-] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2042.069681] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54520373-30aa-4493-a9ba-cb333fd82bd2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.080899] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3b1791-e2ef-4b4a-92ff-71963a2c5974 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.108047] env[63241]: DEBUG nova.compute.manager [req-7bb60696-013e-4d54-8f2d-9f58b590cfe9 req-e711601c-b055-44e4-bf65-1794c1c4aab0 service nova] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Detach interface failed, port_id=8611587f-7fb3-447f-81a7-8a4d5b83c554, reason: Instance 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2042.210454] env[63241]: DEBUG oslo_vmware.api [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821398, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160989} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.210726] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2042.210997] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2042.211116] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2042.211270] env[63241]: INFO nova.compute.manager [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Took 1.21 seconds to destroy the instance on the hypervisor. [ 2042.211512] env[63241]: DEBUG oslo.service.loopingcall [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2042.211703] env[63241]: DEBUG nova.compute.manager [-] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2042.215024] env[63241]: DEBUG nova.network.neutron [-] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2042.548450] env[63241]: INFO nova.compute.manager [-] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Took 1.64 seconds to deallocate network for instance. 
[ 2043.057681] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.058043] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2043.058137] env[63241]: DEBUG nova.objects.instance [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lazy-loading 'resources' on Instance uuid 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2043.184028] env[63241]: DEBUG nova.network.neutron [-] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.593321] env[63241]: DEBUG nova.compute.manager [req-950bb0f8-15e7-437a-91a6-e77c6e1c6357 req-bc292117-56a6-44c9-b1df-b706529a71db service nova] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Received event network-vif-deleted-6be29b8b-a0d6-4346-b774-5faf878f177c {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2043.643291] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6106a9c-3372-4700-9651-352bf5e40cbc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.652241] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fd7211-fde3-460f-a0d3-1247ce5575d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.682586] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cd9e03-c262-4089-829c-5fd1c42ab995 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.686668] env[63241]: INFO nova.compute.manager [-] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Took 1.47 seconds to deallocate network for instance. 
[ 2043.694175] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9b2b35-867b-4011-bf1a-ef855b9b50d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.709724] env[63241]: DEBUG nova.compute.provider_tree [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2044.200711] env[63241]: DEBUG oslo_concurrency.lockutils [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.212821] env[63241]: DEBUG nova.scheduler.client.report [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2044.718270] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.721369] env[63241]: DEBUG oslo_concurrency.lockutils [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.520s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.721493] env[63241]: DEBUG oslo_concurrency.lockutils [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.737092] env[63241]: INFO nova.scheduler.client.report [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Deleted allocations for instance 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59 [ 2044.738881] env[63241]: INFO nova.scheduler.client.report [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 
tempest-ServerActionsTestJSON-1637889344-project-member] Deleted allocations for instance e28ba013-0bc5-4edc-858d-674980bc8742 [ 2045.250244] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f69744aa-3757-4795-a6f0-f7abee05c82b tempest-AttachVolumeNegativeTest-1960000804 tempest-AttachVolumeNegativeTest-1960000804-project-member] Lock "8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.473s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.251232] env[63241]: DEBUG oslo_concurrency.lockutils [None req-212eea03-661c-4649-aedf-08ae204f847d tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "e28ba013-0bc5-4edc-858d-674980bc8742" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.252s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.978622] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "642c896e-64f8-499c-8498-6ad756de8b70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.978854] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "642c896e-64f8-499c-8498-6ad756de8b70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.482723] env[63241]: DEBUG nova.compute.manager [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2047.004388] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.004657] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.006456] env[63241]: INFO nova.compute.claims [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2048.069246] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b2db87-aa76-44e4-b859-560feebdba84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.076687] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855ed5e5-ccd2-4527-8d3f-d4eaba2b93e3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.105658] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5334895d-0b30-42ed-a7a8-125de7200975 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.112325] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce042949-96a7-4bee-a865-3840772cf485 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.126917] env[63241]: DEBUG nova.compute.provider_tree [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2048.630417] env[63241]: DEBUG nova.scheduler.client.report [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2049.135950] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.136471] env[63241]: DEBUG nova.compute.manager [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2049.642320] env[63241]: DEBUG nova.compute.utils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2049.644086] env[63241]: DEBUG nova.compute.manager [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2049.644263] env[63241]: DEBUG nova.network.neutron [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2049.694592] env[63241]: DEBUG nova.policy [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c00391826fd242709ad7947610554fc2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '855da29218ba4391a208e2835f60ee11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 2049.949013] env[63241]: DEBUG nova.network.neutron [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Successfully created port: 861b579e-636a-499a-bf17-f0ef5804a903 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2050.147627] env[63241]: DEBUG nova.compute.manager [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2051.157464] env[63241]: DEBUG nova.compute.manager [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2051.184549] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2051.184789] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2051.184946] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2051.185154] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2051.185306] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2051.185452] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2051.185660] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
2051.185819] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2051.185985] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2051.186172] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2051.186346] env[63241]: DEBUG nova.virt.hardware [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2051.187270] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8029191-0d9f-4e6d-9b83-af96fa3949af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.195077] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ce3778-719e-4fb6-ba83-a1f1e5c09830 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.213136] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2053.213465] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2053.213573] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2053.213797] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2055.365448] env[63241]: DEBUG nova.compute.manager [req-e2985f0e-fbe0-49a0-be41-e949be930344 req-5494c2d1-ee88-4877-ba20-bf744ad35ded service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Received event network-vif-plugged-861b579e-636a-499a-bf17-f0ef5804a903 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2055.365673] env[63241]: DEBUG oslo_concurrency.lockutils [req-e2985f0e-fbe0-49a0-be41-e949be930344 req-5494c2d1-ee88-4877-ba20-bf744ad35ded service nova] Acquiring lock "642c896e-64f8-499c-8498-6ad756de8b70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2055.365760] env[63241]: DEBUG oslo_concurrency.lockutils [req-e2985f0e-fbe0-49a0-be41-e949be930344 req-5494c2d1-ee88-4877-ba20-bf744ad35ded service nova] Lock "642c896e-64f8-499c-8498-6ad756de8b70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2055.365962] env[63241]: DEBUG oslo_concurrency.lockutils [req-e2985f0e-fbe0-49a0-be41-e949be930344 req-5494c2d1-ee88-4877-ba20-bf744ad35ded service nova] Lock "642c896e-64f8-499c-8498-6ad756de8b70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.366450] env[63241]: DEBUG nova.compute.manager [req-e2985f0e-fbe0-49a0-be41-e949be930344 req-5494c2d1-ee88-4877-ba20-bf744ad35ded service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] No waiting events found dispatching network-vif-plugged-861b579e-636a-499a-bf17-f0ef5804a903 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2055.366697] env[63241]: WARNING nova.compute.manager [req-e2985f0e-fbe0-49a0-be41-e949be930344 req-5494c2d1-ee88-4877-ba20-bf744ad35ded service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Received unexpected event network-vif-plugged-861b579e-636a-499a-bf17-f0ef5804a903 for instance with vm_state building and task_state spawning. 
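The network-vif-plugged sequence above ("No waiting events found dispatching ... / Received unexpected event ... for instance with vm_state building and task_state spawning") reflects Nova's instance-event latch: a waiter has to be registered before Neutron sends the notification, otherwise the popped event has nobody to wake and is logged as unexpected, which is harmless here because the driver has not started waiting yet. A simplified, self-contained sketch of that register-then-pop pattern, assuming plain threading; the class and method names are illustrative rather than Nova's real ones:

    import threading

    class InstanceEventLatch:
        """Toy version of the 'register a waiter, then pop on notification' idea."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # event name -> threading.Event

        def prepare(self, name):
            # Called by the spawning thread *before* the action that will
            # trigger the notification (e.g. binding/plugging a port).
            with self._lock:
                event = threading.Event()
                self._waiters[name] = event
                return event

        def pop(self, name):
            # Called from the external-event handler when the network
            # service reports e.g. network-vif-plugged-<port-id>.
            with self._lock:
                event = self._waiters.pop(name, None)
            if event is None:
                # Nobody registered yet: the equivalent of the WARNING above.
                print(f"Received unexpected event {name}")
            else:
                event.set()

    latch = InstanceEventLatch()
    waiter = latch.prepare("network-vif-plugged-861b579e")
    latch.pop("network-vif-plugged-861b579e")
    assert waiter.wait(timeout=1)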
[ 2055.461180] env[63241]: DEBUG nova.network.neutron [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Successfully updated port: 861b579e-636a-499a-bf17-f0ef5804a903 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2055.964414] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2055.964581] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2055.964740] env[63241]: DEBUG nova.network.neutron [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2056.452600] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2056.453035] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2056.453241] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2056.496225] env[63241]: DEBUG nova.network.neutron [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2056.627830] env[63241]: DEBUG nova.network.neutron [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance_info_cache with network_info: [{"id": "861b579e-636a-499a-bf17-f0ef5804a903", "address": "fa:16:3e:20:9b:c0", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861b579e-63", "ovs_interfaceid": "861b579e-636a-499a-bf17-f0ef5804a903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2056.958152] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Skipping network cache update for instance because it is Building. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 2056.988446] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2056.988614] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2056.988767] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2056.988919] env[63241]: DEBUG nova.objects.instance [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lazy-loading 'info_cache' on Instance uuid 31e84206-e583-4610-969e-2ccae2d0b206 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2057.130181] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2057.130542] env[63241]: DEBUG nova.compute.manager [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Instance network_info: |[{"id": "861b579e-636a-499a-bf17-f0ef5804a903", "address": "fa:16:3e:20:9b:c0", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861b579e-63", "ovs_interfaceid": "861b579e-636a-499a-bf17-f0ef5804a903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2057.130989] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:9b:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '861b579e-636a-499a-bf17-f0ef5804a903', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2057.138813] env[63241]: DEBUG oslo.service.loopingcall [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2057.139050] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2057.139284] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-679908a7-bc76-44b8-bb51-65a28d11a68a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.159974] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2057.159974] env[63241]: value = "task-1821402" [ 2057.159974] env[63241]: _type = "Task" [ 2057.159974] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.168191] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821402, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.401638] env[63241]: DEBUG nova.compute.manager [req-6a8bed2f-85e1-409f-aa21-70d52190a7b3 req-82991285-aca8-49fd-be60-7ee002c10c48 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Received event network-changed-861b579e-636a-499a-bf17-f0ef5804a903 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2057.401804] env[63241]: DEBUG nova.compute.manager [req-6a8bed2f-85e1-409f-aa21-70d52190a7b3 req-82991285-aca8-49fd-be60-7ee002c10c48 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Refreshing instance network info cache due to event network-changed-861b579e-636a-499a-bf17-f0ef5804a903. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2057.402295] env[63241]: DEBUG oslo_concurrency.lockutils [req-6a8bed2f-85e1-409f-aa21-70d52190a7b3 req-82991285-aca8-49fd-be60-7ee002c10c48 service nova] Acquiring lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.402425] env[63241]: DEBUG oslo_concurrency.lockutils [req-6a8bed2f-85e1-409f-aa21-70d52190a7b3 req-82991285-aca8-49fd-be60-7ee002c10c48 service nova] Acquired lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.402601] env[63241]: DEBUG nova.network.neutron [req-6a8bed2f-85e1-409f-aa21-70d52190a7b3 req-82991285-aca8-49fd-be60-7ee002c10c48 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Refreshing network info cache for port 861b579e-636a-499a-bf17-f0ef5804a903 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2057.670940] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821402, 'name': CreateVM_Task, 'duration_secs': 0.384603} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.671210] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2057.671840] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.672014] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.672361] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2057.672604] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ebc918f-a394-4428-8c85-0a1d325f1ec6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.677383] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2057.677383] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52b05d22-c1a3-22cc-9af4-d3b7e1d2d88f" [ 2057.677383] env[63241]: _type = "Task" [ 2057.677383] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.685611] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b05d22-c1a3-22cc-9af4-d3b7e1d2d88f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.129661] env[63241]: DEBUG nova.network.neutron [req-6a8bed2f-85e1-409f-aa21-70d52190a7b3 req-82991285-aca8-49fd-be60-7ee002c10c48 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updated VIF entry in instance network info cache for port 861b579e-636a-499a-bf17-f0ef5804a903. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2058.130034] env[63241]: DEBUG nova.network.neutron [req-6a8bed2f-85e1-409f-aa21-70d52190a7b3 req-82991285-aca8-49fd-be60-7ee002c10c48 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance_info_cache with network_info: [{"id": "861b579e-636a-499a-bf17-f0ef5804a903", "address": "fa:16:3e:20:9b:c0", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861b579e-63", "ovs_interfaceid": "861b579e-636a-499a-bf17-f0ef5804a903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.187605] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52b05d22-c1a3-22cc-9af4-d3b7e1d2d88f, 'name': SearchDatastore_Task, 'duration_secs': 0.011845} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.187847] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.188082] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2058.188321] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2058.188462] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2058.188639] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2058.188882] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e0beb9a-c474-4bd8-bd4a-9ebae6c97eb3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.196682] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2058.196850] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2058.197559] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fb20caa-22a8-48ee-9368-d86f75eebf7d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.203604] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2058.203604] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]522c2afd-0152-611e-c3eb-6edb01979205" [ 2058.203604] env[63241]: _type = "Task" [ 2058.203604] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.210424] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522c2afd-0152-611e-c3eb-6edb01979205, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.632928] env[63241]: DEBUG oslo_concurrency.lockutils [req-6a8bed2f-85e1-409f-aa21-70d52190a7b3 req-82991285-aca8-49fd-be60-7ee002c10c48 service nova] Releasing lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.716514] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]522c2afd-0152-611e-c3eb-6edb01979205, 'name': SearchDatastore_Task, 'duration_secs': 0.013593} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.717377] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffde8314-99a6-4bc3-92d5-da3f98ab08de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.722335] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2058.722335] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e6beae-a309-d2b1-e972-bebfbeaa9c10" [ 2058.722335] env[63241]: _type = "Task" [ 2058.722335] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.729843] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e6beae-a309-d2b1-e972-bebfbeaa9c10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.747605] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updating instance_info_cache with network_info: [{"id": "6bc11935-f0d5-456c-b815-ea415689a621", "address": "fa:16:3e:de:e7:97", "network": {"id": "3fe79bbe-c2a5-4163-a73b-9c9874b59bf2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2023973277-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6964b0dd75c4704b8f5cacd2c8e355f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc11935-f0", "ovs_interfaceid": "6bc11935-f0d5-456c-b815-ea415689a621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2059.232399] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e6beae-a309-d2b1-e972-bebfbeaa9c10, 'name': SearchDatastore_Task, 'duration_secs': 0.010017} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.232689] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2059.232943] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 642c896e-64f8-499c-8498-6ad756de8b70/642c896e-64f8-499c-8498-6ad756de8b70.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2059.233215] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5350b900-872d-471c-a357-22c86cfa34d9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.240178] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2059.240178] env[63241]: value = "task-1821403" [ 2059.240178] env[63241]: _type = "Task" [ 2059.240178] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.248041] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821403, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.250513] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-31e84206-e583-4610-969e-2ccae2d0b206" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2059.250693] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2059.250875] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.251040] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.251197] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.452702] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.750513] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440257} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.750819] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 642c896e-64f8-499c-8498-6ad756de8b70/642c896e-64f8-499c-8498-6ad756de8b70.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2059.751047] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2059.751313] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75867ad3-f7db-40e4-aafe-c619af6bc882 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.758899] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2059.758899] env[63241]: value = "task-1821404" [ 2059.758899] env[63241]: _type = "Task" [ 2059.758899] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.767650] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.267952] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070213} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.268248] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2060.269043] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d9a89d-7e4a-43ba-af51-0abec6ef1c20 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.290609] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 642c896e-64f8-499c-8498-6ad756de8b70/642c896e-64f8-499c-8498-6ad756de8b70.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2060.290846] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35165af7-46fa-4005-93e2-1ac8786a4e12 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.310814] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2060.310814] env[63241]: value = "task-1821405" [ 2060.310814] env[63241]: _type = "Task" [ 2060.310814] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.318317] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.820372] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821405, 'name': ReconfigVM_Task, 'duration_secs': 0.287711} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.820768] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 642c896e-64f8-499c-8498-6ad756de8b70/642c896e-64f8-499c-8498-6ad756de8b70.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2060.821312] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4017f5cc-6ae4-4684-ae3c-4156e5a75ead {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.827567] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2060.827567] env[63241]: value = "task-1821406" [ 2060.827567] env[63241]: _type = "Task" [ 2060.827567] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.836940] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821406, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.337520] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821406, 'name': Rename_Task, 'duration_secs': 0.162456} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.337777] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2061.338037] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af1b37f0-76e4-4bc0-94c3-fe97469bfd45 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.344283] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2061.344283] env[63241]: value = "task-1821407" [ 2061.344283] env[63241]: _type = "Task" [ 2061.344283] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.351259] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821407, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.856636] env[63241]: DEBUG oslo_vmware.api [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821407, 'name': PowerOnVM_Task, 'duration_secs': 0.488911} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.857097] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2061.857167] env[63241]: INFO nova.compute.manager [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Took 10.70 seconds to spawn the instance on the hypervisor. [ 2061.857343] env[63241]: DEBUG nova.compute.manager [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2061.858139] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d67e7f9-8c9e-45bd-9adf-1f716a5d415a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.378479] env[63241]: INFO nova.compute.manager [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Took 15.39 seconds to build instance. [ 2062.565129] env[63241]: DEBUG nova.compute.manager [req-d4aecabe-01a3-40f0-a5ce-c9ef2365e966 req-c76e19d5-4362-45b1-b41a-c21458ec4352 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Received event network-changed-861b579e-636a-499a-bf17-f0ef5804a903 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2062.565340] env[63241]: DEBUG nova.compute.manager [req-d4aecabe-01a3-40f0-a5ce-c9ef2365e966 req-c76e19d5-4362-45b1-b41a-c21458ec4352 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Refreshing instance network info cache due to event network-changed-861b579e-636a-499a-bf17-f0ef5804a903. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2062.565556] env[63241]: DEBUG oslo_concurrency.lockutils [req-d4aecabe-01a3-40f0-a5ce-c9ef2365e966 req-c76e19d5-4362-45b1-b41a-c21458ec4352 service nova] Acquiring lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.565710] env[63241]: DEBUG oslo_concurrency.lockutils [req-d4aecabe-01a3-40f0-a5ce-c9ef2365e966 req-c76e19d5-4362-45b1-b41a-c21458ec4352 service nova] Acquired lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.565874] env[63241]: DEBUG nova.network.neutron [req-d4aecabe-01a3-40f0-a5ce-c9ef2365e966 req-c76e19d5-4362-45b1-b41a-c21458ec4352 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Refreshing network info cache for port 861b579e-636a-499a-bf17-f0ef5804a903 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2062.880671] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bf19aaef-e785-43ae-b03c-7986d9a98ba8 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "642c896e-64f8-499c-8498-6ad756de8b70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.902s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.277993] env[63241]: DEBUG nova.network.neutron [req-d4aecabe-01a3-40f0-a5ce-c9ef2365e966 req-c76e19d5-4362-45b1-b41a-c21458ec4352 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updated VIF entry in instance network info cache for port 861b579e-636a-499a-bf17-f0ef5804a903. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2063.278386] env[63241]: DEBUG nova.network.neutron [req-d4aecabe-01a3-40f0-a5ce-c9ef2365e966 req-c76e19d5-4362-45b1-b41a-c21458ec4352 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance_info_cache with network_info: [{"id": "861b579e-636a-499a-bf17-f0ef5804a903", "address": "fa:16:3e:20:9b:c0", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861b579e-63", "ovs_interfaceid": "861b579e-636a-499a-bf17-f0ef5804a903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.780882] env[63241]: DEBUG oslo_concurrency.lockutils [req-d4aecabe-01a3-40f0-a5ce-c9ef2365e966 req-c76e19d5-4362-45b1-b41a-c21458ec4352 service nova] Releasing lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2064.447627] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.590416] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.590687] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.590904] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.591108] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.591289] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.593706] env[63241]: INFO nova.compute.manager [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Terminating instance [ 2064.595664] env[63241]: DEBUG nova.compute.manager [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2064.595880] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2064.596132] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec917829-750b-475c-9bde-ab6209628059 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.604303] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2064.604303] env[63241]: value = "task-1821408" [ 2064.604303] env[63241]: _type = "Task" [ 2064.604303] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.612499] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821408, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.952647] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2065.115914] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821408, 'name': PowerOffVM_Task, 'duration_secs': 0.184387} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.116218] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2065.116422] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Volume detach. Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2065.116616] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377237', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'name': 'volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'e2aea319-280e-4dc8-9c90-f080cdf2a08a', 'attached_at': '2024-12-12T15:34:45.000000', 'detached_at': '', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'serial': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2065.117467] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775bf900-91db-4a58-9aa7-6ec716f4d977 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.135720] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427840ed-ca88-4065-8eb5-8b3674de426e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.141963] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59572ae1-e297-4567-a6bb-fbb6dbfd3402 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.158894] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b40011-9cd7-4c14-81f5-7e422946ee27 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.172770] env[63241]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] The volume has not been displaced from its original location: [datastore1] volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e/volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e.vmdk. No consolidation needed. {{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2065.178019] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2065.178282] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd15a91b-0071-4752-a8a7-51a468619605 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.195640] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2065.195640] env[63241]: value = "task-1821409" [ 2065.195640] env[63241]: _type = "Task" [ 2065.195640] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.203194] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821409, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.455944] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.456322] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.456322] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.456466] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2065.457412] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d73b65b-f40b-41fb-aba2-fe0222750f84 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.465435] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6330f8-2c00-4daa-ae07-6d86b8291caf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.479327] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ada917c-2488-4297-ac26-1518cdf10df8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.485336] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ff4aed-5052-4f84-9549-bc219a675cee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.514736] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180127MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2065.514882] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.515091] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.705435] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821409, 'name': ReconfigVM_Task, 'duration_secs': 0.163663} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.705736] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2065.710449] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d231950f-79dc-4e7b-8547-21edd20aab9d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.725604] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2065.725604] env[63241]: value = "task-1821410" [ 2065.725604] env[63241]: _type = "Task" [ 2065.725604] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.733388] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.235321] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821410, 'name': ReconfigVM_Task, 'duration_secs': 0.383763} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.235612] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377237', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'name': 'volume-1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'e2aea319-280e-4dc8-9c90-f080cdf2a08a', 'attached_at': '2024-12-12T15:34:45.000000', 'detached_at': '', 'volume_id': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e', 'serial': '1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2066.235879] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2066.236624] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754da6f2-3838-444f-b5d1-1fbffd36dde5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.242749] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2066.242955] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fba7887c-1a22-418b-b696-3e423d03a9c8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.487152] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2066.487504] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2066.487548] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleting the datastore file [datastore1] e2aea319-280e-4dc8-9c90-f080cdf2a08a {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2066.487812] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67a51595-5f1f-4d97-be17-78f65af6ce4a {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.495470] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2066.495470] env[63241]: value = "task-1821412" [ 2066.495470] env[63241]: _type = "Task" [ 2066.495470] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.502570] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821412, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.605724] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 31e84206-e583-4610-969e-2ccae2d0b206 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.605881] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 43684f7f-0a5d-48e5-8ab6-573db8d81ff0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.606015] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance e2aea319-280e-4dc8-9c90-f080cdf2a08a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.606171] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 642c896e-64f8-499c-8498-6ad756de8b70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2066.606367] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2066.606508] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1344MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2066.658229] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82be328c-e541-422c-9c91-8a4431638bc4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.665698] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1e347e-1267-4ee9-9e70-10417467ccdd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.696041] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79afdd0-531e-4912-85c8-f82945a5ef42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.703029] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c843fb6-b876-4bc9-9f1a-cd7c38736a31 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.715732] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2067.005435] env[63241]: DEBUG oslo_vmware.api [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087322} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.005670] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2067.005826] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2067.006017] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2067.006204] env[63241]: INFO nova.compute.manager [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Took 2.41 seconds to destroy the instance on the hypervisor. [ 2067.006443] env[63241]: DEBUG oslo.service.loopingcall [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2067.006632] env[63241]: DEBUG nova.compute.manager [-] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2067.006722] env[63241]: DEBUG nova.network.neutron [-] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2067.219245] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2067.434483] env[63241]: DEBUG nova.compute.manager [req-ef1e1bbe-1e8a-46de-9d9d-495330633c3f req-3539fcbf-75db-47ab-ba3f-cdd4c0141cec service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Received event network-vif-deleted-b4f2cf51-6a1b-455b-a0dc-72616da10ed0 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2067.434685] env[63241]: INFO nova.compute.manager [req-ef1e1bbe-1e8a-46de-9d9d-495330633c3f req-3539fcbf-75db-47ab-ba3f-cdd4c0141cec service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Neutron deleted interface b4f2cf51-6a1b-455b-a0dc-72616da10ed0; detaching it from the instance and deleting it from the info cache [ 2067.435064] env[63241]: DEBUG nova.network.neutron [req-ef1e1bbe-1e8a-46de-9d9d-495330633c3f req-3539fcbf-75db-47ab-ba3f-cdd4c0141cec service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.724112] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2067.724477] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.209s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.914111] env[63241]: DEBUG nova.network.neutron [-] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.938653] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3d7ca3c-aa81-4dc4-8c9e-59a345d2de01 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.949128] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-23a55dd7-7787-4b59-ace8-453920ef95f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.974394] env[63241]: DEBUG nova.compute.manager [req-ef1e1bbe-1e8a-46de-9d9d-495330633c3f req-3539fcbf-75db-47ab-ba3f-cdd4c0141cec service nova] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Detach interface failed, port_id=b4f2cf51-6a1b-455b-a0dc-72616da10ed0, reason: Instance e2aea319-280e-4dc8-9c90-f080cdf2a08a could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2068.417438] env[63241]: INFO nova.compute.manager [-] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Took 1.41 seconds to deallocate network for instance. [ 2068.962301] env[63241]: INFO nova.compute.manager [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Took 0.54 seconds to detach 1 volumes for instance. [ 2068.964511] env[63241]: DEBUG nova.compute.manager [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Deleting volume: 1be1a1bc-ecb3-48c9-ab1c-20d05c058a0e {{(pid=63241) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 2069.504863] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.505173] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.505404] env[63241]: DEBUG nova.objects.instance [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'resources' on Instance uuid e2aea319-280e-4dc8-9c90-f080cdf2a08a {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2070.068619] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37dbfb19-eabf-4253-9a5f-1e2922267e35 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.075586] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7801c0-fee7-4439-9445-12466d8db942 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.104926] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68623b45-3cda-4b55-85af-e1fbf612d2de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.112413] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49c97ee-92c4-4beb-ac48-a564eeffd2dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.125269] env[63241]: DEBUG nova.compute.provider_tree [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2070.628232] env[63241]: DEBUG nova.scheduler.client.report [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2071.132861] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.155914] env[63241]: INFO nova.scheduler.client.report [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted allocations for instance e2aea319-280e-4dc8-9c90-f080cdf2a08a [ 2071.663198] env[63241]: DEBUG oslo_concurrency.lockutils [None req-4903cc19-17b7-4b42-9ce0-d056068c0c5e tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "e2aea319-280e-4dc8-9c90-f080cdf2a08a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.072s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.241372] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.241828] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.241965] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3da80201-420a-4662-9e34-544a01899544 
tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.242174] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.242348] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.244515] env[63241]: INFO nova.compute.manager [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Terminating instance [ 2072.246339] env[63241]: DEBUG nova.compute.manager [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2072.246531] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2072.247419] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0966c855-2903-48fd-9a46-4befc88bfb57 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.255321] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2072.255536] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46ff3986-9faa-4168-89bb-3661b46dccc4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.262462] env[63241]: DEBUG oslo_vmware.api [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2072.262462] env[63241]: value = "task-1821414" [ 2072.262462] env[63241]: _type = "Task" [ 2072.262462] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.270015] env[63241]: DEBUG oslo_vmware.api [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821414, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.772444] env[63241]: DEBUG oslo_vmware.api [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821414, 'name': PowerOffVM_Task, 'duration_secs': 0.202679} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.772675] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2072.772834] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2072.773123] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39f660fa-a32c-4a69-bd74-0e375a2ff2ce {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.471127] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2075.471542] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2075.471542] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleting the datastore file [datastore1] 43684f7f-0a5d-48e5-8ab6-573db8d81ff0 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2075.471925] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc0d2279-00cf-4116-a22c-e4333a6c3ad1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.478183] env[63241]: DEBUG oslo_vmware.api [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2075.478183] env[63241]: value = "task-1821416" [ 2075.478183] env[63241]: _type = "Task" [ 2075.478183] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.485708] env[63241]: DEBUG oslo_vmware.api [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821416, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.988339] env[63241]: DEBUG oslo_vmware.api [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821416, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133315} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.988604] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2075.988824] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2075.989062] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2075.989260] env[63241]: INFO nova.compute.manager [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Took 3.74 seconds to destroy the instance on the hypervisor. [ 2075.989533] env[63241]: DEBUG oslo.service.loopingcall [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2075.989756] env[63241]: DEBUG nova.compute.manager [-] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2075.989862] env[63241]: DEBUG nova.network.neutron [-] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2076.229939] env[63241]: DEBUG nova.compute.manager [req-0f687d31-e1d9-45d7-bb55-eda4082dfc23 req-1b0a2750-bb3f-4806-b0bb-77ef790edbbf service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Received event network-vif-deleted-4d328bf7-9485-42e2-9ad2-0b00ac726b9b {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2076.230144] env[63241]: INFO nova.compute.manager [req-0f687d31-e1d9-45d7-bb55-eda4082dfc23 req-1b0a2750-bb3f-4806-b0bb-77ef790edbbf service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Neutron deleted interface 4d328bf7-9485-42e2-9ad2-0b00ac726b9b; detaching it from the instance and deleting it from the info cache [ 2076.230304] env[63241]: DEBUG nova.network.neutron [req-0f687d31-e1d9-45d7-bb55-eda4082dfc23 req-1b0a2750-bb3f-4806-b0bb-77ef790edbbf service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2076.710342] env[63241]: DEBUG nova.network.neutron [-] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2076.733405] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d198a1d-c7bf-4a3c-9184-3752b61a79ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.743343] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13add25b-f4b8-4655-99ad-7dd91ae1bd46 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.768534] env[63241]: DEBUG nova.compute.manager [req-0f687d31-e1d9-45d7-bb55-eda4082dfc23 req-1b0a2750-bb3f-4806-b0bb-77ef790edbbf service nova] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Detach interface failed, port_id=4d328bf7-9485-42e2-9ad2-0b00ac726b9b, reason: Instance 43684f7f-0a5d-48e5-8ab6-573db8d81ff0 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2077.213375] env[63241]: INFO nova.compute.manager [-] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Took 1.22 seconds to deallocate network for instance. 
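The destroy sequences logged above (power off the VM, unregister it, delete its datastore files, then deallocate networking) all go through oslo.vmware's task polling: each *_Task invocation returns a Task managed object, and wait_for_task() polls it until vCenter reports success, which is what produces the repeated "progress is 0%" and "completed successfully" entries. A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession (session) and a VirtualMachine managed-object reference (vm_ref); this is illustrative, not the actual nova.virt.vmwareapi code:

def power_off_and_unregister(session, vm_ref):
    # PowerOffVM_Task returns a Task managed object; wait_for_task() polls it
    # until vCenter reports success (or raises on error), mirroring the
    # PowerOffVM_Task entries above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
    # UnregisterVM is a synchronous vim call; no task is returned, which
    # matches the log (no wait_for_task follows the UnregisterVM invocation).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

The same wait_for_task() loop handles the Rename_Task, PowerOnVM_Task, ReconfigVM_Task and DeleteDatastoreFile_Task entries above; only the invoked vim method changes.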
[ 2077.720528] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.720901] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.721071] env[63241]: DEBUG nova.objects.instance [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'resources' on Instance uuid 43684f7f-0a5d-48e5-8ab6-573db8d81ff0 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2078.278656] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95503b6c-5a16-4a58-9fba-fa3bb2066e86 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.286097] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02ea0cd-7ef3-4022-894c-613f93fd8965 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.317058] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b538872-b4eb-4a46-8081-8ebecb57c061 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.323415] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f423761b-4966-4fcc-b6fc-eb318336ac33 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.336175] env[63241]: DEBUG nova.compute.provider_tree [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2078.839381] env[63241]: DEBUG nova.scheduler.client.report [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2079.344226] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3da80201-420a-4662-9e34-544a01899544 
tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.623s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.364338] env[63241]: INFO nova.scheduler.client.report [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted allocations for instance 43684f7f-0a5d-48e5-8ab6-573db8d81ff0 [ 2079.872259] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3da80201-420a-4662-9e34-544a01899544 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "43684f7f-0a5d-48e5-8ab6-573db8d81ff0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.630s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.121357] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "31e84206-e583-4610-969e-2ccae2d0b206" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.121722] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "31e84206-e583-4610-969e-2ccae2d0b206" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.121820] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "31e84206-e583-4610-969e-2ccae2d0b206-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.121996] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "31e84206-e583-4610-969e-2ccae2d0b206-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.122198] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "31e84206-e583-4610-969e-2ccae2d0b206-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.124744] env[63241]: INFO nova.compute.manager [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 
tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Terminating instance [ 2084.126590] env[63241]: DEBUG nova.compute.manager [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2084.126789] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2084.127698] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614a3b05-0495-4dda-a456-f5345b5fcb37 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.135523] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2084.135740] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63a5c73e-1558-44e6-b38b-b3e91701e8b1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.142675] env[63241]: DEBUG oslo_vmware.api [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2084.142675] env[63241]: value = "task-1821418" [ 2084.142675] env[63241]: _type = "Task" [ 2084.142675] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.150868] env[63241]: DEBUG oslo_vmware.api [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821418, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.652429] env[63241]: DEBUG oslo_vmware.api [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821418, 'name': PowerOffVM_Task, 'duration_secs': 0.185971} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.652703] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2084.652874] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2084.653136] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87c62b00-e7a2-48f1-bd6c-a6010ffae549 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.756590] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2084.756863] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2084.757020] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleting the datastore file [datastore1] 31e84206-e583-4610-969e-2ccae2d0b206 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2084.757298] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29947b5f-4a92-4d90-986a-49e62ebdf768 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.764798] env[63241]: DEBUG oslo_vmware.api [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for the task: (returnval){ [ 2084.764798] env[63241]: value = "task-1821420" [ 2084.764798] env[63241]: _type = "Task" [ 2084.764798] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.773143] env[63241]: DEBUG oslo_vmware.api [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821420, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.274464] env[63241]: DEBUG oslo_vmware.api [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Task: {'id': task-1821420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128947} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.274853] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2085.274988] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2085.275201] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2085.275379] env[63241]: INFO nova.compute.manager [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2085.275619] env[63241]: DEBUG oslo.service.loopingcall [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2085.275810] env[63241]: DEBUG nova.compute.manager [-] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2085.275911] env[63241]: DEBUG nova.network.neutron [-] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2085.505306] env[63241]: DEBUG nova.compute.manager [req-6492d3c3-5dbb-4573-a5e2-6e056abcf4fd req-6b129ba0-68c9-4e6d-9e24-ff02774a1353 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Received event network-vif-deleted-6bc11935-f0d5-456c-b815-ea415689a621 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2085.505497] env[63241]: INFO nova.compute.manager [req-6492d3c3-5dbb-4573-a5e2-6e056abcf4fd req-6b129ba0-68c9-4e6d-9e24-ff02774a1353 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Neutron deleted interface 6bc11935-f0d5-456c-b815-ea415689a621; detaching it from the instance and deleting it from the info cache [ 2085.505676] env[63241]: DEBUG nova.network.neutron [req-6492d3c3-5dbb-4573-a5e2-6e056abcf4fd req-6b129ba0-68c9-4e6d-9e24-ff02774a1353 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.979415] env[63241]: DEBUG nova.network.neutron [-] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2086.008488] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-687335d6-4ba8-49d7-aca8-23721fe747b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.018273] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba32b534-21c8-431f-978a-b558158fffda {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.043484] env[63241]: DEBUG nova.compute.manager [req-6492d3c3-5dbb-4573-a5e2-6e056abcf4fd req-6b129ba0-68c9-4e6d-9e24-ff02774a1353 service nova] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Detach interface failed, port_id=6bc11935-f0d5-456c-b815-ea415689a621, reason: Instance 31e84206-e583-4610-969e-2ccae2d0b206 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2086.481698] env[63241]: INFO nova.compute.manager [-] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Took 1.21 seconds to deallocate network for instance. 
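Both instance teardowns in this section run the same vCenter sequence: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task, each submitted through the oslo.vmware session and polled by wait_for_task (the "progress is 0%" / "completed successfully" lines at api.py:434/444). A minimal sketch of that call pattern, assuming a reachable vCenter and using a placeholder endpoint, credentials and managed-object ID that are not taken from this log:

# Sketch of driving a vCenter task through oslo.vmware and polling it.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',          # placeholder endpoint/credentials
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')  # placeholder moref

# invoke_api() proxies the SOAP call; wait_for_task() polls the returned task
# until it reports success and raises if the task fails.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)

# UnregisterVM is a plain method, not a task, so there is nothing to poll.
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)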
[ 2086.988227] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.988516] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.988746] env[63241]: DEBUG nova.objects.instance [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lazy-loading 'resources' on Instance uuid 31e84206-e583-4610-969e-2ccae2d0b206 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2087.534802] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc7c568-a4bf-4201-96d2-caa8ecbb5eba {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.542574] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9436ff5-7786-455f-b38d-f981599d5a40 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.573914] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cdd7a3-6bf8-4daf-be46-d0918891d743 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.580850] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a081ff6c-5bee-4662-a038-dde1b162ec7e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.593951] env[63241]: DEBUG nova.compute.provider_tree [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2088.096952] env[63241]: DEBUG nova.scheduler.client.report [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2088.601501] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3269d26c-c954-432e-8a86-83b3358bea94 
tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.613s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.622645] env[63241]: INFO nova.scheduler.client.report [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Deleted allocations for instance 31e84206-e583-4610-969e-2ccae2d0b206 [ 2089.131689] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3269d26c-c954-432e-8a86-83b3358bea94 tempest-ServerActionsTestOtherA-1407622275 tempest-ServerActionsTestOtherA-1407622275-project-member] Lock "31e84206-e583-4610-969e-2ccae2d0b206" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.009s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.267344] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquiring lock "db514556-34d3-4a55-97e6-69b848e8f2ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.267754] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "db514556-34d3-4a55-97e6-69b848e8f2ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.769888] env[63241]: DEBUG nova.compute.manager [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2095.291785] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.292144] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.293634] env[63241]: INFO nova.compute.claims [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2096.338596] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c97bc30-5001-4808-aeb0-14ccf6dc842a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.346129] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0807d694-ebda-402b-9f33-51c7bf9fabff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.377458] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad429df-585b-44de-92fb-a78497a00d99 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.384474] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7349e07c-abae-4dec-b3d8-40238f5d7695 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.397496] env[63241]: DEBUG nova.compute.provider_tree [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2096.900231] env[63241]: DEBUG nova.scheduler.client.report [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2097.405351] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.405835] env[63241]: DEBUG nova.compute.manager [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2097.911255] env[63241]: DEBUG nova.compute.utils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2097.912672] env[63241]: DEBUG nova.compute.manager [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2097.912853] env[63241]: DEBUG nova.network.neutron [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2097.951108] env[63241]: DEBUG nova.policy [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2f89ac069921429d9f9c59c0d63d317b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8e46bdedc1148b3a631b0e1fa30c4cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 2098.184688] env[63241]: DEBUG nova.network.neutron [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Successfully created port: e992cfc6-fb5a-4aa4-be5a-70d949fa8157 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2098.416486] env[63241]: DEBUG nova.compute.manager [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2099.426729] env[63241]: DEBUG nova.compute.manager [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2099.450708] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2099.450970] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2099.451142] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2099.451327] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2099.451477] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2099.451625] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2099.451832] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2099.451993] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2099.452175] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2099.452339] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2099.452510] env[63241]: DEBUG nova.virt.hardware [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2099.453404] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6066026-c1e4-4227-8c48-feb2233f2675 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.461811] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e543d6d-db07-4677-80c8-fa72f0c83472 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.309782] env[63241]: DEBUG nova.compute.manager [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Stashing vm_state: active {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2100.828852] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.829143] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2101.334690] env[63241]: INFO nova.compute.claims [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2101.841401] env[63241]: INFO nova.compute.resource_tracker [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 
tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating resource usage from migration 8dcac58d-9798-4769-972b-7077fef32661 [ 2101.896400] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ab75bc-7994-4887-b3ee-647a20eb2ea7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.904557] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117abee2-fa8c-45b1-a06b-92a2dad95116 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.937556] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c04c454-9e52-492e-8a88-392859983a66 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.947502] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7439f9d6-a5ab-464e-994e-db73d5d5955c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.963266] env[63241]: DEBUG nova.compute.provider_tree [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2102.466648] env[63241]: DEBUG nova.scheduler.client.report [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2102.971557] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.142s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2102.972043] env[63241]: INFO nova.compute.manager [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Migrating [ 2103.487457] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2103.487655] env[63241]: DEBUG 
oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2103.487839] env[63241]: DEBUG nova.network.neutron [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2104.244378] env[63241]: DEBUG nova.network.neutron [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance_info_cache with network_info: [{"id": "861b579e-636a-499a-bf17-f0ef5804a903", "address": "fa:16:3e:20:9b:c0", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861b579e-63", "ovs_interfaceid": "861b579e-636a-499a-bf17-f0ef5804a903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2104.747568] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.061442] env[63241]: DEBUG nova.compute.manager [req-5de98b7e-5678-4c9e-8f4c-035de5c8183e req-edef0a68-d576-4328-9fc8-68859efee937 service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Received event network-vif-plugged-e992cfc6-fb5a-4aa4-be5a-70d949fa8157 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2106.061711] env[63241]: DEBUG oslo_concurrency.lockutils [req-5de98b7e-5678-4c9e-8f4c-035de5c8183e req-edef0a68-d576-4328-9fc8-68859efee937 service nova] Acquiring lock "db514556-34d3-4a55-97e6-69b848e8f2ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.061864] env[63241]: DEBUG oslo_concurrency.lockutils 
[req-5de98b7e-5678-4c9e-8f4c-035de5c8183e req-edef0a68-d576-4328-9fc8-68859efee937 service nova] Lock "db514556-34d3-4a55-97e6-69b848e8f2ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.062071] env[63241]: DEBUG oslo_concurrency.lockutils [req-5de98b7e-5678-4c9e-8f4c-035de5c8183e req-edef0a68-d576-4328-9fc8-68859efee937 service nova] Lock "db514556-34d3-4a55-97e6-69b848e8f2ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.062216] env[63241]: DEBUG nova.compute.manager [req-5de98b7e-5678-4c9e-8f4c-035de5c8183e req-edef0a68-d576-4328-9fc8-68859efee937 service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] No waiting events found dispatching network-vif-plugged-e992cfc6-fb5a-4aa4-be5a-70d949fa8157 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2106.062383] env[63241]: WARNING nova.compute.manager [req-5de98b7e-5678-4c9e-8f4c-035de5c8183e req-edef0a68-d576-4328-9fc8-68859efee937 service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Received unexpected event network-vif-plugged-e992cfc6-fb5a-4aa4-be5a-70d949fa8157 for instance with vm_state building and task_state spawning. [ 2106.148244] env[63241]: DEBUG nova.network.neutron [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Successfully updated port: e992cfc6-fb5a-4aa4-be5a-70d949fa8157 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2106.261909] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52190ebd-0e3c-462b-80fd-a8d2d5035169 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.281700] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance '642c896e-64f8-499c-8498-6ad756de8b70' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2106.650717] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquiring lock "refresh_cache-db514556-34d3-4a55-97e6-69b848e8f2ed" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.650978] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquired lock "refresh_cache-db514556-34d3-4a55-97e6-69b848e8f2ed" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.651089] env[63241]: DEBUG nova.network.neutron [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: 
db514556-34d3-4a55-97e6-69b848e8f2ed] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2106.787395] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2106.787959] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ea37c2a-eea1-481b-b5cf-9e984eab4a24 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.796784] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2106.796784] env[63241]: value = "task-1821421" [ 2106.796784] env[63241]: _type = "Task" [ 2106.796784] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.804311] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821421, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.181770] env[63241]: DEBUG nova.network.neutron [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Instance cache missing network info. {{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2107.307177] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821421, 'name': PowerOffVM_Task, 'duration_secs': 0.177908} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.307482] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2107.307707] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance '642c896e-64f8-499c-8498-6ad756de8b70' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2107.333484] env[63241]: DEBUG nova.network.neutron [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Updating instance_info_cache with network_info: [{"id": "e992cfc6-fb5a-4aa4-be5a-70d949fa8157", "address": "fa:16:3e:63:bb:28", "network": {"id": "5c7976c1-81fb-4902-b8ba-e8353ad93400", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1830723079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8e46bdedc1148b3a631b0e1fa30c4cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape992cfc6-fb", "ovs_interfaceid": "e992cfc6-fb5a-4aa4-be5a-70d949fa8157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2107.815218] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2107.815507] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 
tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2107.815690] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2107.815879] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2107.816061] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2107.816225] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2107.816446] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2107.816603] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2107.816772] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2107.816957] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2107.817212] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2107.822953] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0164534-3fc9-4c26-bb4f-70aaf5ffa30a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.836010] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Releasing lock "refresh_cache-db514556-34d3-4a55-97e6-69b848e8f2ed" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2107.836336] env[63241]: DEBUG nova.compute.manager [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Instance network_info: |[{"id": "e992cfc6-fb5a-4aa4-be5a-70d949fa8157", "address": "fa:16:3e:63:bb:28", "network": {"id": "5c7976c1-81fb-4902-b8ba-e8353ad93400", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1830723079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8e46bdedc1148b3a631b0e1fa30c4cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape992cfc6-fb", "ovs_interfaceid": "e992cfc6-fb5a-4aa4-be5a-70d949fa8157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2107.836729] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:bb:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e992cfc6-fb5a-4aa4-be5a-70d949fa8157', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2107.844364] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Creating folder: Project (f8e46bdedc1148b3a631b0e1fa30c4cb). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2107.845817] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4edfd35-4b40-429a-8938-094f6ed6dfe5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.847576] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2107.847576] env[63241]: value = "task-1821422" [ 2107.847576] env[63241]: _type = "Task" [ 2107.847576] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.856468] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821422, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.857632] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Created folder: Project (f8e46bdedc1148b3a631b0e1fa30c4cb) in parent group-v376927. [ 2107.857808] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Creating folder: Instances. Parent ref: group-v377242. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2107.858094] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ac46cad-4567-405f-8f85-0ea7ca4051d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.866878] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Created folder: Instances in parent group-v377242. [ 2107.867144] env[63241]: DEBUG oslo.service.loopingcall [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2107.867337] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2107.867578] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e7c8196-e9f9-4660-90de-5436b4b40904 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.886563] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2107.886563] env[63241]: value = "task-1821425" [ 2107.886563] env[63241]: _type = "Task" [ 2107.886563] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.893762] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821425, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.090024] env[63241]: DEBUG nova.compute.manager [req-48314033-6f3e-496d-938e-0dc383b5dd67 req-98a13453-e1dd-4253-9de6-449812f346c9 service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Received event network-changed-e992cfc6-fb5a-4aa4-be5a-70d949fa8157 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2108.090252] env[63241]: DEBUG nova.compute.manager [req-48314033-6f3e-496d-938e-0dc383b5dd67 req-98a13453-e1dd-4253-9de6-449812f346c9 service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Refreshing instance network info cache due to event network-changed-e992cfc6-fb5a-4aa4-be5a-70d949fa8157. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2108.090522] env[63241]: DEBUG oslo_concurrency.lockutils [req-48314033-6f3e-496d-938e-0dc383b5dd67 req-98a13453-e1dd-4253-9de6-449812f346c9 service nova] Acquiring lock "refresh_cache-db514556-34d3-4a55-97e6-69b848e8f2ed" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2108.090692] env[63241]: DEBUG oslo_concurrency.lockutils [req-48314033-6f3e-496d-938e-0dc383b5dd67 req-98a13453-e1dd-4253-9de6-449812f346c9 service nova] Acquired lock "refresh_cache-db514556-34d3-4a55-97e6-69b848e8f2ed" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2108.090863] env[63241]: DEBUG nova.network.neutron [req-48314033-6f3e-496d-938e-0dc383b5dd67 req-98a13453-e1dd-4253-9de6-449812f346c9 service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Refreshing network info cache for port e992cfc6-fb5a-4aa4-be5a-70d949fa8157 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2108.358353] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821422, 'name': ReconfigVM_Task, 'duration_secs': 0.178236} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.358757] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance '642c896e-64f8-499c-8498-6ad756de8b70' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2108.395829] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821425, 'name': CreateVM_Task, 'duration_secs': 0.32891} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.396617] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2108.397289] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2108.397456] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2108.397772] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2108.398247] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f089fce-2aab-4d4e-9585-80146d230104 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.402473] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2108.402473] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]523f9653-8d2d-87c4-aea0-d9d918a2f85d" [ 2108.402473] env[63241]: _type = "Task" [ 2108.402473] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.409892] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523f9653-8d2d-87c4-aea0-d9d918a2f85d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.786884] env[63241]: DEBUG nova.network.neutron [req-48314033-6f3e-496d-938e-0dc383b5dd67 req-98a13453-e1dd-4253-9de6-449812f346c9 service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Updated VIF entry in instance network info cache for port e992cfc6-fb5a-4aa4-be5a-70d949fa8157. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2108.787367] env[63241]: DEBUG nova.network.neutron [req-48314033-6f3e-496d-938e-0dc383b5dd67 req-98a13453-e1dd-4253-9de6-449812f346c9 service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Updating instance_info_cache with network_info: [{"id": "e992cfc6-fb5a-4aa4-be5a-70d949fa8157", "address": "fa:16:3e:63:bb:28", "network": {"id": "5c7976c1-81fb-4902-b8ba-e8353ad93400", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1830723079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8e46bdedc1148b3a631b0e1fa30c4cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape992cfc6-fb", "ovs_interfaceid": "e992cfc6-fb5a-4aa4-be5a-70d949fa8157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2108.864518] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2108.866049] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2108.866049] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2108.866049] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2108.866049] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 
tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2108.866049] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2108.866049] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2108.866049] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2108.866049] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2108.866049] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2108.866465] env[63241]: DEBUG nova.virt.hardware [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2108.871714] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2108.872030] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aee22f5a-b24d-4d86-ba1d-31ba818ff96a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.891087] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2108.891087] env[63241]: value = "task-1821426" [ 2108.891087] env[63241]: _type = "Task" [ 2108.891087] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.898161] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821426, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.911152] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]523f9653-8d2d-87c4-aea0-d9d918a2f85d, 'name': SearchDatastore_Task, 'duration_secs': 0.009329} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.911432] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2108.911659] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2108.911926] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2108.912044] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2108.912228] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2108.912477] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7d24ef9-d06f-4f3c-a2e8-a81fac016690 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.919707] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2108.919890] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2108.920623] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d880e3-b268-4147-afd7-8401760e68fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.925989] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2108.925989] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52239d43-fce9-5dec-36f3-c8623afc4959" [ 2108.925989] env[63241]: _type = "Task" [ 2108.925989] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.933787] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52239d43-fce9-5dec-36f3-c8623afc4959, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.290076] env[63241]: DEBUG oslo_concurrency.lockutils [req-48314033-6f3e-496d-938e-0dc383b5dd67 req-98a13453-e1dd-4253-9de6-449812f346c9 service nova] Releasing lock "refresh_cache-db514556-34d3-4a55-97e6-69b848e8f2ed" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2109.401335] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821426, 'name': ReconfigVM_Task, 'duration_secs': 0.15652} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.401578] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2109.402354] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcd9ea4-df68-42b0-a796-28697d6cc590 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.424163] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 642c896e-64f8-499c-8498-6ad756de8b70/642c896e-64f8-499c-8498-6ad756de8b70.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2109.424390] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b78efbf4-b3c2-4e43-a0b2-d73e81e0a83b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.444672] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52239d43-fce9-5dec-36f3-c8623afc4959, 'name': SearchDatastore_Task, 'duration_secs': 0.008796} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.446278] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2109.446278] env[63241]: value = "task-1821427" [ 2109.446278] env[63241]: _type = "Task" [ 2109.446278] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.446506] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f2dc989-1779-4179-8f68-09627c0268f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.453716] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2109.453716] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5208af01-8719-86d6-dfaf-7e1306d3add9" [ 2109.453716] env[63241]: _type = "Task" [ 2109.453716] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.456654] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821427, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.465009] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5208af01-8719-86d6-dfaf-7e1306d3add9, 'name': SearchDatastore_Task, 'duration_secs': 0.009421} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.465271] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2109.465516] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] db514556-34d3-4a55-97e6-69b848e8f2ed/db514556-34d3-4a55-97e6-69b848e8f2ed.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2109.465734] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-995f8c7b-2378-4042-8225-f3f9914a6697 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.471161] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2109.471161] env[63241]: value = "task-1821428" [ 2109.471161] env[63241]: _type = "Task" [ 2109.471161] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.478134] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821428, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.958488] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821427, 'name': ReconfigVM_Task, 'duration_secs': 0.467687} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.958595] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 642c896e-64f8-499c-8498-6ad756de8b70/642c896e-64f8-499c-8498-6ad756de8b70.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2109.959666] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance '642c896e-64f8-499c-8498-6ad756de8b70' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2109.980022] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.394685} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.980167] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] db514556-34d3-4a55-97e6-69b848e8f2ed/db514556-34d3-4a55-97e6-69b848e8f2ed.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2109.980412] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2109.980678] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d485041e-632f-4042-ad9f-7117069424e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.989075] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2109.989075] env[63241]: value = "task-1821429" [ 2109.989075] env[63241]: _type = "Task" [ 2109.989075] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.996890] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821429, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.466050] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d943eab-c610-4da0-9f38-4fc9b8ab6c00 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.484978] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a803dd29-ffee-4c5e-9504-57729b17729f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.504878] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance '642c896e-64f8-499c-8498-6ad756de8b70' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2110.512525] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821429, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056328} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.512758] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2110.513544] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a9a6da-e9ee-45ea-bd0c-13a8bb17895f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.535277] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] db514556-34d3-4a55-97e6-69b848e8f2ed/db514556-34d3-4a55-97e6-69b848e8f2ed.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2110.535502] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a9319c0-798c-462e-8bba-8a1e83bf1ef8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.554735] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2110.554735] env[63241]: value = "task-1821430" [ 2110.554735] env[63241]: _type = "Task" [ 2110.554735] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.562395] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821430, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.044328] env[63241]: DEBUG nova.network.neutron [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Port 861b579e-636a-499a-bf17-f0ef5804a903 binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2111.065490] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821430, 'name': ReconfigVM_Task, 'duration_secs': 0.262377} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.065798] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Reconfigured VM instance instance-00000079 to attach disk [datastore1] db514556-34d3-4a55-97e6-69b848e8f2ed/db514556-34d3-4a55-97e6-69b848e8f2ed.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2111.066458] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac67b301-66f7-470a-aeb3-1525c32e3ae5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.072738] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2111.072738] env[63241]: value = "task-1821431" [ 2111.072738] env[63241]: _type = "Task" [ 2111.072738] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.080761] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821431, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.583158] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821431, 'name': Rename_Task, 'duration_secs': 0.141354} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.583629] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2111.583705] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf458a4b-ed2f-4936-9907-7054b50aeb0e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.589850] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2111.589850] env[63241]: value = "task-1821432" [ 2111.589850] env[63241]: _type = "Task" [ 2111.589850] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.598376] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821432, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.068284] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "642c896e-64f8-499c-8498-6ad756de8b70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.068525] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "642c896e-64f8-499c-8498-6ad756de8b70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.068697] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "642c896e-64f8-499c-8498-6ad756de8b70-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.100494] env[63241]: DEBUG oslo_vmware.api [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821432, 'name': PowerOnVM_Task, 'duration_secs': 0.416194} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.100670] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2112.100730] env[63241]: INFO nova.compute.manager [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Took 12.67 seconds to spawn the instance on the hypervisor. [ 2112.100879] env[63241]: DEBUG nova.compute.manager [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2112.101840] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489525f2-a057-4dd4-9ec9-e4068a01ad39 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.619904] env[63241]: INFO nova.compute.manager [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Took 17.35 seconds to build instance. [ 2113.112471] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2113.112739] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2113.112848] env[63241]: DEBUG nova.network.neutron [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2113.121560] env[63241]: DEBUG oslo_concurrency.lockutils [None req-574906f6-9719-498d-a8ef-3278a209da56 tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "db514556-34d3-4a55-97e6-69b848e8f2ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.854s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.642103] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquiring lock "db514556-34d3-4a55-97e6-69b848e8f2ed" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.642374] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "db514556-34d3-4a55-97e6-69b848e8f2ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.642584] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquiring lock "db514556-34d3-4a55-97e6-69b848e8f2ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.642767] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "db514556-34d3-4a55-97e6-69b848e8f2ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.642944] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "db514556-34d3-4a55-97e6-69b848e8f2ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.647243] env[63241]: INFO nova.compute.manager [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Terminating instance [ 2113.649104] env[63241]: DEBUG nova.compute.manager [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Start destroying the instance on the hypervisor. 
{{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2113.649309] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2113.650164] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a81d22-c707-4cd5-bfd0-c47608135624 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.658227] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2113.658457] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba3dded0-57a0-41a4-a6b4-df66955b8047 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.664705] env[63241]: DEBUG oslo_vmware.api [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2113.664705] env[63241]: value = "task-1821433" [ 2113.664705] env[63241]: _type = "Task" [ 2113.664705] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.672924] env[63241]: DEBUG oslo_vmware.api [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821433, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.861561] env[63241]: DEBUG nova.network.neutron [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance_info_cache with network_info: [{"id": "861b579e-636a-499a-bf17-f0ef5804a903", "address": "fa:16:3e:20:9b:c0", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861b579e-63", "ovs_interfaceid": "861b579e-636a-499a-bf17-f0ef5804a903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2114.174640] env[63241]: DEBUG oslo_vmware.api [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821433, 'name': PowerOffVM_Task, 'duration_secs': 0.208503} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.174897] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2114.175089] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2114.175343] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3951ac6-2772-4652-ab0c-065b3ca09add {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.223556] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2114.223785] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2114.223947] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2114.365104] env[63241]: DEBUG oslo_concurrency.lockutils [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.380783] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2114.380999] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2114.381201] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Deleting the datastore file [datastore1] db514556-34d3-4a55-97e6-69b848e8f2ed {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2114.381463] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d82529d7-cf6d-47c5-b2bf-6d939e4d13d4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.388607] env[63241]: DEBUG oslo_vmware.api [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for the task: (returnval){ [ 2114.388607] env[63241]: value = "task-1821435" [ 2114.388607] env[63241]: _type = "Task" [ 2114.388607] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.396177] env[63241]: DEBUG oslo_vmware.api [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821435, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.452128] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2114.893437] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf18bae-3e16-4840-b173-a773e525c7dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.901230] env[63241]: DEBUG oslo_vmware.api [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Task: {'id': task-1821435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14684} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.914138] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2114.914329] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2114.914501] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2114.914671] env[63241]: INFO nova.compute.manager [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Took 1.27 seconds to destroy the instance on the hypervisor. [ 2114.914900] env[63241]: DEBUG oslo.service.loopingcall [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2114.915213] env[63241]: DEBUG nova.compute.manager [-] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2114.915310] env[63241]: DEBUG nova.network.neutron [-] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2114.917217] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42bc562-6adf-4562-b3e1-f4f0304ebc73 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.924067] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance '642c896e-64f8-499c-8498-6ad756de8b70' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2115.140223] env[63241]: DEBUG nova.compute.manager [req-dd8719fc-000b-4843-893a-0bd37429b697 req-052a35f9-a95b-4010-b065-85ca5e30603c service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Received event network-vif-deleted-e992cfc6-fb5a-4aa4-be5a-70d949fa8157 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2115.140405] env[63241]: INFO nova.compute.manager [req-dd8719fc-000b-4843-893a-0bd37429b697 req-052a35f9-a95b-4010-b065-85ca5e30603c service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Neutron deleted interface e992cfc6-fb5a-4aa4-be5a-70d949fa8157; detaching it from the instance and deleting it from the info cache [ 2115.140584] env[63241]: DEBUG nova.network.neutron [req-dd8719fc-000b-4843-893a-0bd37429b697 req-052a35f9-a95b-4010-b065-85ca5e30603c service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.432825] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2115.433098] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdd4063e-8e51-4623-95e8-67b292759334 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.440890] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2115.440890] env[63241]: value = "task-1821436" [ 2115.440890] env[63241]: _type = "Task" [ 2115.440890] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.450129] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821436, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.618871] env[63241]: DEBUG nova.network.neutron [-] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.643452] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eda3af79-cab8-4287-8b7b-3fddec817760 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.654620] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc6e1a1-cb9d-40c4-a9e4-b23f0d3f6f82 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.678776] env[63241]: DEBUG nova.compute.manager [req-dd8719fc-000b-4843-893a-0bd37429b697 req-052a35f9-a95b-4010-b065-85ca5e30603c service nova] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Detach interface failed, port_id=e992cfc6-fb5a-4aa4-be5a-70d949fa8157, reason: Instance db514556-34d3-4a55-97e6-69b848e8f2ed could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2115.952392] env[63241]: DEBUG oslo_vmware.api [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821436, 'name': PowerOnVM_Task, 'duration_secs': 0.403694} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.952874] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2115.952874] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-eb57208f-1524-44b1-94a2-f16adb827cb9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance '642c896e-64f8-499c-8498-6ad756de8b70' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2116.122056] env[63241]: INFO nova.compute.manager [-] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Took 1.21 seconds to deallocate network for instance. 
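The records above and below follow a recurring pattern in this trace: a vCenter task object is created (PowerOnVM_Task, DeleteDatastoreFile_Task, ReconfigVM_Task), then polled ("Waiting for the task ... progress is N% ... completed successfully") until it reaches a terminal state. The sketch below is an illustrative, self-contained approximation of that polling loop only; it does not use the real oslo.vmware API, and the names fetch_task_state and TaskFailed are assumptions introduced here for the example.

    # Illustrative sketch only -- not the oslo.vmware implementation.
    # Repeatedly polls a caller-supplied fetch_task_state(task_id) callable
    # until the task reports success, error, or the timeout expires,
    # mirroring the "progress is N% ... completed successfully" records
    # seen in this log.
    import time


    class TaskFailed(Exception):
        """Raised when the polled task reports an error or times out."""


    def wait_for_task(fetch_task_state, task_id, interval=0.5, timeout=300.0):
        """Poll fetch_task_state(task_id) -> (state, progress, error).

        'state' is assumed to be one of: 'queued', 'running', 'success', 'error'.
        Returns normally on 'success'; raises TaskFailed otherwise.
        """
        deadline = time.monotonic() + timeout
        while True:
            state, progress, error = fetch_task_state(task_id)
            print(f"Task {task_id}: state={state} progress={progress}%")
            if state == "success":
                return
            if state == "error":
                raise TaskFailed(f"Task {task_id} failed: {error}")
            if time.monotonic() >= deadline:
                raise TaskFailed(f"Task {task_id} timed out after {timeout}s")
            time.sleep(interval)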
[ 2116.451940] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2116.452144] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2116.628674] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.628939] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2116.629177] env[63241]: DEBUG nova.objects.instance [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lazy-loading 'resources' on Instance uuid db514556-34d3-4a55-97e6-69b848e8f2ed {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2117.182245] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86c21e1-7314-4869-8a04-d1e7d6303ace {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.189589] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259bde1f-50f7-4101-b69c-a7039fb28bbb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.218776] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec9ad97-f7d2-4ae4-800f-296d2ad2be15 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.225704] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7026e5-f7e5-4e17-9b07-578ca27d8872 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.239606] env[63241]: DEBUG nova.compute.provider_tree [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2117.457203] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. 
{{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2117.742788] env[63241]: DEBUG nova.scheduler.client.report [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2118.248674] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.619s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.267029] env[63241]: INFO nova.scheduler.client.report [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Deleted allocations for instance db514556-34d3-4a55-97e6-69b848e8f2ed [ 2118.679874] env[63241]: DEBUG nova.network.neutron [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Port 861b579e-636a-499a-bf17-f0ef5804a903 binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2118.680160] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.680320] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.680484] env[63241]: DEBUG nova.network.neutron [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2118.777220] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b398a278-c6bb-4f68-be25-66830313a3ad tempest-ServerMetadataTestJSON-1262410799 tempest-ServerMetadataTestJSON-1262410799-project-member] Lock "db514556-34d3-4a55-97e6-69b848e8f2ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.134s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.451198] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2119.451494] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2119.451552] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2119.575405] env[63241]: DEBUG nova.network.neutron [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance_info_cache with network_info: [{"id": "861b579e-636a-499a-bf17-f0ef5804a903", "address": "fa:16:3e:20:9b:c0", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861b579e-63", "ovs_interfaceid": "861b579e-636a-499a-bf17-f0ef5804a903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2120.080031] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.452613] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2120.582055] env[63241]: DEBUG nova.compute.manager [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] 
Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63241) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 2120.582278] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2120.582513] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.086853] env[63241]: DEBUG nova.objects.instance [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'migration_context' on Instance uuid 642c896e-64f8-499c-8498-6ad756de8b70 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2121.636772] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2747a2d-bb56-4e2b-8b5b-8bbae687c5be {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.644917] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f67dad-6232-40d9-9e55-d65e1c641628 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.674899] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0183d8-80df-42fb-8dc1-b0899030d439 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.681800] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e24a87-c642-41a2-9d3a-a3daf32f2ce8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.694945] env[63241]: DEBUG nova.compute.provider_tree [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2122.198560] env[63241]: DEBUG nova.scheduler.client.report [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2123.211440] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.629s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.452071] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2124.746331] env[63241]: INFO nova.compute.manager [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Swapping old allocation on dict_keys(['9a5e30eb-ceae-4224-aa66-dcbfa98ce24b']) held by migration 8dcac58d-9798-4769-972b-7077fef32661 for instance [ 2124.766938] env[63241]: DEBUG nova.scheduler.client.report [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Overwriting current allocation {'allocations': {'9a5e30eb-ceae-4224-aa66-dcbfa98ce24b': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 186}}, 'project_id': '855da29218ba4391a208e2835f60ee11', 'user_id': 'c00391826fd242709ad7947610554fc2', 'consumer_generation': 1} on consumer 642c896e-64f8-499c-8498-6ad756de8b70 {{(pid=63241) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 2124.842133] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2124.842334] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2124.842509] env[63241]: DEBUG nova.network.neutron [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2124.954693] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.954937] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: 
waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.955124] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.955294] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2124.956279] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68cb28e-fe17-482f-aac0-1604b088758f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.964592] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5f03d2-161c-4e55-99cb-f28a14f6d46a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.978456] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba3f00f-dd4f-4e32-9c5a-559a8e1ac96a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.985121] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e006bd05-a9d9-4268-bdaf-a374d4cb1a9c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.012931] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180346MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2125.013047] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.013223] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.558860] env[63241]: DEBUG nova.network.neutron [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance_info_cache with network_info: [{"id": "861b579e-636a-499a-bf17-f0ef5804a903", "address": "fa:16:3e:20:9b:c0", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap861b579e-63", "ovs_interfaceid": "861b579e-636a-499a-bf17-f0ef5804a903", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.032889] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 642c896e-64f8-499c-8498-6ad756de8b70 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2126.033114] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2126.033262] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2126.056380] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ee8674-7d9b-433e-835e-2fcbeac8432a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.060902] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-642c896e-64f8-499c-8498-6ad756de8b70" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.061352] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2126.061606] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a83776b2-f699-4573-b14a-309469920260 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.065812] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-46a20b36-cd46-4ff0-a124-42f08b57f5ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.069950] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2126.069950] env[63241]: value = "task-1821437" [ 2126.069950] env[63241]: _type = "Task" [ 2126.069950] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.097867] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd7f7a3-e5ac-4f3e-9e40-819a0cdddaaa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.103326] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821437, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.108091] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b9dc76-650b-4edb-a40b-23cb549ff1c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.120793] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2126.579581] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821437, 'name': PowerOffVM_Task, 'duration_secs': 0.161099} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.579951] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2126.580499] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2126.580713] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2126.580868] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2126.581066] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2126.581214] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2126.581356] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2126.581558] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2126.581718] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 
tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2126.581883] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2126.582057] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2126.582247] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2126.587258] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4411fc9a-831e-436a-854d-2fd1f678c8db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.602247] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2126.602247] env[63241]: value = "task-1821438" [ 2126.602247] env[63241]: _type = "Task" [ 2126.602247] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.609645] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821438, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.623487] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2127.112247] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821438, 'name': ReconfigVM_Task, 'duration_secs': 0.137144} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.113089] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d83c7f7-11e2-463b-a869-c28e58dde13e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.133035] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2127.133035] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.119s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.133286] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2127.133514] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2127.133677] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2127.133857] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2127.134008] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2127.134162] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2127.134362] 
env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2127.134517] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2127.134681] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2127.134842] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2127.135019] env[63241]: DEBUG nova.virt.hardware [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2127.135903] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fad2162-ceaa-409b-b7d9-8c4cd5b6b272 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.141754] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2127.141754] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52abc6cd-4af8-8a05-c2c8-9b5a3aeff74a" [ 2127.141754] env[63241]: _type = "Task" [ 2127.141754] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.149011] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52abc6cd-4af8-8a05-c2c8-9b5a3aeff74a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.653194] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52abc6cd-4af8-8a05-c2c8-9b5a3aeff74a, 'name': SearchDatastore_Task, 'duration_secs': 0.009349} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.659284] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2127.659582] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9042ba8-a52b-4e61-9c25-b7d9a33c02c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.678736] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2127.678736] env[63241]: value = "task-1821439" [ 2127.678736] env[63241]: _type = "Task" [ 2127.678736] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.686850] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.189158] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821439, 'name': ReconfigVM_Task, 'duration_secs': 0.29746} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.189439] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2128.190222] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4dd570-49ef-403b-8782-be2cec0dd55a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.211801] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 642c896e-64f8-499c-8498-6ad756de8b70/642c896e-64f8-499c-8498-6ad756de8b70.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2128.212076] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ab79889-d48e-4bdf-803f-1fd71fbcfe12 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.230129] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2128.230129] env[63241]: value = "task-1821440" [ 2128.230129] env[63241]: _type = "Task" [ 2128.230129] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.237967] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821440, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.739707] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821440, 'name': ReconfigVM_Task, 'duration_secs': 0.252934} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.740127] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 642c896e-64f8-499c-8498-6ad756de8b70/642c896e-64f8-499c-8498-6ad756de8b70.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2128.740817] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113bcdc0-e737-4fc9-af16-ae34d8eb948d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.758383] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02294cd4-a308-4d50-b822-e1aed3eaaa79 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.776686] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b8704f-3c15-4f25-a4a7-7a244e3d25b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.793637] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1d1d16-fe79-4227-9e2b-b21aa66f29ee {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.799910] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2128.800143] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-537c5909-6e8e-4fc7-8de2-e683452401e5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.806253] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2128.806253] env[63241]: value = "task-1821441" [ 2128.806253] env[63241]: _type = "Task" [ 2128.806253] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.814116] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821441, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.315795] env[63241]: DEBUG oslo_vmware.api [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821441, 'name': PowerOnVM_Task, 'duration_secs': 0.344328} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.316091] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2130.326883] env[63241]: INFO nova.compute.manager [None req-f09bab01-814f-4abf-9af1-8fb687e710ca tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance to original state: 'active' [ 2131.211133] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "642c896e-64f8-499c-8498-6ad756de8b70" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.211446] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "642c896e-64f8-499c-8498-6ad756de8b70" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.211686] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "642c896e-64f8-499c-8498-6ad756de8b70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.212280] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "642c896e-64f8-499c-8498-6ad756de8b70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.212466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "642c896e-64f8-499c-8498-6ad756de8b70-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.214597] env[63241]: INFO nova.compute.manager [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Terminating instance [ 2131.216441] env[63241]: DEBUG nova.compute.manager [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 
642c896e-64f8-499c-8498-6ad756de8b70] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2131.216665] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2131.217542] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d828657c-93c9-49b4-9696-f1211794ec20 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.224987] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2131.225267] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49f5671f-5d81-4a93-866f-669c6d89b89a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.231490] env[63241]: DEBUG oslo_vmware.api [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2131.231490] env[63241]: value = "task-1821442" [ 2131.231490] env[63241]: _type = "Task" [ 2131.231490] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.238972] env[63241]: DEBUG oslo_vmware.api [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.741098] env[63241]: DEBUG oslo_vmware.api [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821442, 'name': PowerOffVM_Task, 'duration_secs': 0.185784} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.741463] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2131.741585] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2131.741772] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9dc6a771-0c63-4b86-814d-c0a4e1a5079e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.816357] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2131.816579] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2131.816826] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleting the datastore file [datastore1] 642c896e-64f8-499c-8498-6ad756de8b70 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2131.817132] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a4fa797-434d-44d0-9bf6-535a043aec9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.825010] env[63241]: DEBUG oslo_vmware.api [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2131.825010] env[63241]: value = "task-1821444" [ 2131.825010] env[63241]: _type = "Task" [ 2131.825010] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.832336] env[63241]: DEBUG oslo_vmware.api [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821444, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.335219] env[63241]: DEBUG oslo_vmware.api [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821444, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183143} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.335414] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2132.335626] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2132.335829] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2132.335999] env[63241]: INFO nova.compute.manager [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2132.336251] env[63241]: DEBUG oslo.service.loopingcall [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2132.336438] env[63241]: DEBUG nova.compute.manager [-] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2132.336555] env[63241]: DEBUG nova.network.neutron [-] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2132.782871] env[63241]: DEBUG nova.compute.manager [req-da0c6271-8ee1-44ca-9e19-df4aebb3ebc8 req-1fbe5f32-537b-43f0-9b30-25a9a5ff3c90 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Received event network-vif-deleted-861b579e-636a-499a-bf17-f0ef5804a903 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2132.783124] env[63241]: INFO nova.compute.manager [req-da0c6271-8ee1-44ca-9e19-df4aebb3ebc8 req-1fbe5f32-537b-43f0-9b30-25a9a5ff3c90 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Neutron deleted interface 861b579e-636a-499a-bf17-f0ef5804a903; detaching it from the instance and deleting it from the info cache [ 2132.783350] env[63241]: DEBUG nova.network.neutron [req-da0c6271-8ee1-44ca-9e19-df4aebb3ebc8 req-1fbe5f32-537b-43f0-9b30-25a9a5ff3c90 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.258749] env[63241]: DEBUG nova.network.neutron [-] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.286606] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95f23c46-79d2-4245-ac3f-e3f06962570d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.296545] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1253e7-fa37-4a22-b6bc-885815a7d007 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.319256] env[63241]: DEBUG nova.compute.manager [req-da0c6271-8ee1-44ca-9e19-df4aebb3ebc8 req-1fbe5f32-537b-43f0-9b30-25a9a5ff3c90 service nova] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Detach interface failed, port_id=861b579e-636a-499a-bf17-f0ef5804a903, reason: Instance 642c896e-64f8-499c-8498-6ad756de8b70 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2133.761324] env[63241]: INFO nova.compute.manager [-] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Took 1.42 seconds to deallocate network for instance. 
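The "Acquiring lock" / "acquired ... waited" / "released ... held" records that follow come from oslo.concurrency's lockutils wrapper, which serializes the resource tracker's usage update behind the "compute_resources" lock and logs how long the caller waited and how long the lock was held. A minimal sketch of that pattern using only the oslo.concurrency library (this is not Nova's resource-tracker code; the lock name and instance UUID are taken from the records around it):

from oslo_concurrency import lockutils

# The synchronized decorator runs the wrapped call under the named lock; its
# inner() helper is what emits the "Acquiring lock ... by ...",
# "Lock ... acquired ... waited Ns" and "Lock ... released ... held Ns"
# DEBUG records seen in this log.
@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # critical section: executes with "compute_resources" held
    print('updating tracked usage for %s' % instance_uuid)

update_usage('642c896e-64f8-499c-8498-6ad756de8b70')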
[ 2134.269169] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.269643] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.269643] env[63241]: DEBUG nova.objects.instance [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'resources' on Instance uuid 642c896e-64f8-499c-8498-6ad756de8b70 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2134.805287] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d547727d-6a8f-4086-b05b-9bded12e472d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.813284] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3ebfd8-c60b-451b-8d9f-fde9aa4dcf89 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.846768] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb54b63c-3f27-45f8-847c-f7bd6e33c846 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.854801] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5dacbc-4640-4789-9854-0f0e0f945ccc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.868467] env[63241]: DEBUG nova.compute.provider_tree [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2135.388260] env[63241]: ERROR nova.scheduler.client.report [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [req-3716a7c8-34d3-4096-8359-e226c4820d30] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3716a7c8-34d3-4096-8359-e226c4820d30"}]} [ 2135.403094] env[63241]: DEBUG nova.scheduler.client.report [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2135.415286] env[63241]: DEBUG nova.scheduler.client.report [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2135.415498] env[63241]: DEBUG nova.compute.provider_tree [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2135.424853] env[63241]: DEBUG nova.scheduler.client.report [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2135.441249] env[63241]: DEBUG nova.scheduler.client.report [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2135.462155] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc024b2-7db0-4e79-9706-482763398653 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
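The 409 above is placement's optimistic-concurrency check: every inventory write carries the resource provider generation the writer last saw, and a stale generation is rejected with code placement.concurrent_update, after which the client refreshes its view and retries (the "Refreshing inventories" records here and the "generation from 187 to 188" record further on). A minimal refresh-and-retry sketch against the placement REST API, assuming a reachable PLACEMENT_URL and auth token (both placeholders), not Nova's actual report-client code; the provider UUID and inventory figures are the ones visible in these records:

import requests

PLACEMENT_URL = 'http://placement.example.test/placement'   # placeholder endpoint
HEADERS = {'x-auth-token': 'ADMIN_TOKEN',                    # placeholder token
           'openstack-api-version': 'placement 1.26'}

def put_inventories(rp_uuid, inventories, retries=3):
    """PUT inventories, refreshing the provider generation on a 409 conflict."""
    url = '%s/resource_providers/%s/inventories' % (PLACEMENT_URL, rp_uuid)
    for _ in range(retries):
        # Fetch the current generation so the write is made against fresh state.
        gen = requests.get(url, headers=HEADERS).json()['resource_provider_generation']
        resp = requests.put(url, headers=HEADERS, json={
            'resource_provider_generation': gen,
            'inventories': inventories,
        })
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the generation;
        # loop and retry with a refreshed generation, as the log shows Nova doing.
    raise RuntimeError('gave up after repeated generation conflicts')

put_inventories('9a5e30eb-ceae-4224-aa66-dcbfa98ce24b', {
    'VCPU': {'total': 48, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530},
    'DISK_GB': {'total': 400, 'max_unit': 156},
})

Fields not supplied in the PUT body (min_unit, step_size, and so on) fall back to placement's defaults; a production client would carry them through as Nova's report client does.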
[ 2135.469773] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cff840d-ed51-4daa-8a12-02b13636b105 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.498803] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ae0950-889e-4738-b95e-70345e3667e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.505710] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0896e811-4758-4c14-85cd-47ae0ef74244 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.519557] env[63241]: DEBUG nova.compute.provider_tree [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2136.048639] env[63241]: DEBUG nova.scheduler.client.report [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 187 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2136.048918] env[63241]: DEBUG nova.compute.provider_tree [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 187 to 188 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2136.049110] env[63241]: DEBUG nova.compute.provider_tree [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2136.553544] env[63241]: DEBUG 
oslo_concurrency.lockutils [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.284s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2136.571115] env[63241]: INFO nova.scheduler.client.report [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleted allocations for instance 642c896e-64f8-499c-8498-6ad756de8b70 [ 2137.079570] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ca6f148-4e1b-4e67-b7f0-7ab22eef5e94 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "642c896e-64f8-499c-8498-6ad756de8b70" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.868s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2138.197315] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "55267f9f-66bd-4298-8ac6-19bebe71c05e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2138.197617] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2138.700644] env[63241]: DEBUG nova.compute.manager [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2139.221725] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2139.222126] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.224355] env[63241]: INFO nova.compute.claims [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2140.259834] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a85cc61-69d5-4489-a861-bbe29f4661d6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.267749] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efb8c1d-f0bf-4256-a3af-1309a3c6fdc1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.298703] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbf05cc-1f01-46b1-95d8-6d0eb8bdabd6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.306011] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211ba53e-882c-4525-b622-b813e3d26855 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.318809] env[63241]: DEBUG nova.compute.provider_tree [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2140.822144] env[63241]: DEBUG nova.scheduler.client.report [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2141.327267] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.105s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.327811] env[63241]: DEBUG nova.compute.manager [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2141.832581] env[63241]: DEBUG nova.compute.utils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2141.834114] env[63241]: DEBUG nova.compute.manager [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2141.834290] env[63241]: DEBUG nova.network.neutron [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2141.882140] env[63241]: DEBUG nova.policy [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c00391826fd242709ad7947610554fc2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '855da29218ba4391a208e2835f60ee11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 2142.153999] env[63241]: DEBUG nova.network.neutron [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Successfully created port: 3f0a6003-8737-433a-9490-078edacc86c3 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2142.337561] env[63241]: DEBUG nova.compute.manager [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2143.347554] env[63241]: DEBUG nova.compute.manager [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2143.378824] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2143.379121] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2143.379284] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2143.379473] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2143.379623] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2143.379880] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2143.380112] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
2143.380278] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2143.380450] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2143.380639] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2143.380840] env[63241]: DEBUG nova.virt.hardware [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2143.381735] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d8f64f-17db-4fa2-bac9-937867b321da {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.389973] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf3dbd1-d139-4e25-bb6f-734d9178a535 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.526096] env[63241]: DEBUG nova.compute.manager [req-71f4749f-f698-4d29-a54f-fe25443bb17e req-98a8a8a7-72b6-49a0-a096-38b8393d60f1 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Received event network-vif-plugged-3f0a6003-8737-433a-9490-078edacc86c3 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2143.526280] env[63241]: DEBUG oslo_concurrency.lockutils [req-71f4749f-f698-4d29-a54f-fe25443bb17e req-98a8a8a7-72b6-49a0-a096-38b8393d60f1 service nova] Acquiring lock "55267f9f-66bd-4298-8ac6-19bebe71c05e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.526502] env[63241]: DEBUG oslo_concurrency.lockutils [req-71f4749f-f698-4d29-a54f-fe25443bb17e req-98a8a8a7-72b6-49a0-a096-38b8393d60f1 service nova] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.526684] env[63241]: DEBUG oslo_concurrency.lockutils [req-71f4749f-f698-4d29-a54f-fe25443bb17e req-98a8a8a7-72b6-49a0-a096-38b8393d60f1 service nova] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.526852] env[63241]: DEBUG nova.compute.manager 
[req-71f4749f-f698-4d29-a54f-fe25443bb17e req-98a8a8a7-72b6-49a0-a096-38b8393d60f1 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] No waiting events found dispatching network-vif-plugged-3f0a6003-8737-433a-9490-078edacc86c3 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2143.527063] env[63241]: WARNING nova.compute.manager [req-71f4749f-f698-4d29-a54f-fe25443bb17e req-98a8a8a7-72b6-49a0-a096-38b8393d60f1 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Received unexpected event network-vif-plugged-3f0a6003-8737-433a-9490-078edacc86c3 for instance with vm_state building and task_state spawning. [ 2143.607659] env[63241]: DEBUG nova.network.neutron [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Successfully updated port: 3f0a6003-8737-433a-9490-078edacc86c3 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2144.112708] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2144.113991] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2144.113991] env[63241]: DEBUG nova.network.neutron [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2144.643974] env[63241]: DEBUG nova.network.neutron [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2144.764361] env[63241]: DEBUG nova.network.neutron [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updating instance_info_cache with network_info: [{"id": "3f0a6003-8737-433a-9490-078edacc86c3", "address": "fa:16:3e:93:e5:dd", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0a6003-87", "ovs_interfaceid": "3f0a6003-8737-433a-9490-078edacc86c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.267448] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2145.267759] env[63241]: DEBUG nova.compute.manager [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Instance network_info: |[{"id": "3f0a6003-8737-433a-9490-078edacc86c3", "address": "fa:16:3e:93:e5:dd", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0a6003-87", "ovs_interfaceid": "3f0a6003-8737-433a-9490-078edacc86c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2145.268189] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:e5:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f0a6003-8737-433a-9490-078edacc86c3', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2145.275659] env[63241]: DEBUG oslo.service.loopingcall [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2145.275855] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2145.276092] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86a220f4-1b04-4da8-ad40-c2ec4c5a390e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.298191] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2145.298191] env[63241]: value = "task-1821445" [ 2145.298191] env[63241]: _type = "Task" [ 2145.298191] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.305673] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821445, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.550218] env[63241]: DEBUG nova.compute.manager [req-0248cf0e-e04a-47a2-9675-86332bcf746e req-fe0a5e93-0b43-46b4-a66e-f83f4e6fc4b4 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Received event network-changed-3f0a6003-8737-433a-9490-078edacc86c3 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2145.550422] env[63241]: DEBUG nova.compute.manager [req-0248cf0e-e04a-47a2-9675-86332bcf746e req-fe0a5e93-0b43-46b4-a66e-f83f4e6fc4b4 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Refreshing instance network info cache due to event network-changed-3f0a6003-8737-433a-9490-078edacc86c3. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2145.550652] env[63241]: DEBUG oslo_concurrency.lockutils [req-0248cf0e-e04a-47a2-9675-86332bcf746e req-fe0a5e93-0b43-46b4-a66e-f83f4e6fc4b4 service nova] Acquiring lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2145.550803] env[63241]: DEBUG oslo_concurrency.lockutils [req-0248cf0e-e04a-47a2-9675-86332bcf746e req-fe0a5e93-0b43-46b4-a66e-f83f4e6fc4b4 service nova] Acquired lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.550975] env[63241]: DEBUG nova.network.neutron [req-0248cf0e-e04a-47a2-9675-86332bcf746e req-fe0a5e93-0b43-46b4-a66e-f83f4e6fc4b4 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Refreshing network info cache for port 3f0a6003-8737-433a-9490-078edacc86c3 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2145.808832] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821445, 'name': CreateVM_Task, 'duration_secs': 0.282271} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.809174] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2145.809963] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2145.810146] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.810486] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2145.810740] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff4bd29a-ddcf-44d7-a1cf-fbf7f0e12e6c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.815085] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2145.815085] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]524bbbad-fd31-b6bf-2962-fba9e130ffb5" [ 2145.815085] env[63241]: _type = "Task" [ 2145.815085] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.823716] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524bbbad-fd31-b6bf-2962-fba9e130ffb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.258818] env[63241]: DEBUG nova.network.neutron [req-0248cf0e-e04a-47a2-9675-86332bcf746e req-fe0a5e93-0b43-46b4-a66e-f83f4e6fc4b4 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updated VIF entry in instance network info cache for port 3f0a6003-8737-433a-9490-078edacc86c3. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2146.259210] env[63241]: DEBUG nova.network.neutron [req-0248cf0e-e04a-47a2-9675-86332bcf746e req-fe0a5e93-0b43-46b4-a66e-f83f4e6fc4b4 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updating instance_info_cache with network_info: [{"id": "3f0a6003-8737-433a-9490-078edacc86c3", "address": "fa:16:3e:93:e5:dd", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0a6003-87", "ovs_interfaceid": "3f0a6003-8737-433a-9490-078edacc86c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.325907] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]524bbbad-fd31-b6bf-2962-fba9e130ffb5, 'name': SearchDatastore_Task, 'duration_secs': 0.009977} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.326206] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.326452] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2146.326702] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2146.326850] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2146.327039] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2146.327288] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0e1390c-1334-43ce-9406-92862eef439a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.335109] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2146.335252] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2146.335929] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2d22333-3770-457e-8637-4ced605c6b8f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.340402] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2146.340402] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]525c5f98-4db3-080b-621c-36636bb38cba" [ 2146.340402] env[63241]: _type = "Task" [ 2146.340402] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.348813] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525c5f98-4db3-080b-621c-36636bb38cba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.761697] env[63241]: DEBUG oslo_concurrency.lockutils [req-0248cf0e-e04a-47a2-9675-86332bcf746e req-fe0a5e93-0b43-46b4-a66e-f83f4e6fc4b4 service nova] Releasing lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.850476] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]525c5f98-4db3-080b-621c-36636bb38cba, 'name': SearchDatastore_Task, 'duration_secs': 0.00829} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.851230] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32575a6d-118e-406d-9419-f67688f8cbf0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.856074] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2146.856074] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52732b7b-f81c-1455-8afa-234e804a3373" [ 2146.856074] env[63241]: _type = "Task" [ 2146.856074] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.862983] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52732b7b-f81c-1455-8afa-234e804a3373, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.367056] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52732b7b-f81c-1455-8afa-234e804a3373, 'name': SearchDatastore_Task, 'duration_secs': 0.008928} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.367335] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2147.367586] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 55267f9f-66bd-4298-8ac6-19bebe71c05e/55267f9f-66bd-4298-8ac6-19bebe71c05e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2147.367863] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50776696-60f5-457d-a99d-c2c50891508e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.374173] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2147.374173] env[63241]: value = "task-1821446" [ 2147.374173] env[63241]: _type = "Task" [ 2147.374173] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.381296] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821446, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.883573] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821446, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.400925} completed successfully. 
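The SearchDatastore_Task and CopyVirtualDisk_Task entries above follow oslo.vmware's usual invoke-then-poll shape: invoke_api() returns a Task managed object, and wait_for_task() polls it (the "progress is 0%." lines) until it completes. A minimal sketch of that pattern, assuming an oslo.vmware install; the endpoint, credentials, and datastore paths below are placeholders, and the constructor arguments should be treated as approximate:

from oslo_vmware import api as vmware_api

# Hypothetical endpoint/credentials; creating the session logs in to vCenter.
session = vmware_api.VMwareAPISession(
    'vc1.example.test', 'user', 'password',
    api_retry_count=10, task_poll_interval=0.5)

# Invoke an asynchronous vCenter method; it returns a Task managed object.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    sourceName='[datastore1] devstack-image-cache_base/<image>.vmdk',
    destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')

# Poll the task until it succeeds or raise if it errors out, which is what
# produces the repeated "progress is N%." DEBUG lines in the log.
result = session.wait_for_task(task)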
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.883984] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 55267f9f-66bd-4298-8ac6-19bebe71c05e/55267f9f-66bd-4298-8ac6-19bebe71c05e.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2147.884152] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2147.884260] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16e788ea-a32d-4fce-aef1-f79fc087b7fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.890710] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2147.890710] env[63241]: value = "task-1821447" [ 2147.890710] env[63241]: _type = "Task" [ 2147.890710] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.897584] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821447, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.400828] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821447, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05942} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.401111] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2148.401903] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a24d5dc-6fab-4e64-93ec-0d376cf5d5f3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.423161] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 55267f9f-66bd-4298-8ac6-19bebe71c05e/55267f9f-66bd-4298-8ac6-19bebe71c05e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2148.423401] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8c86cd7-143d-4c9f-982b-a7a2c0ac49d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.442729] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2148.442729] env[63241]: value = "task-1821448" [ 2148.442729] env[63241]: _type = "Task" [ 2148.442729] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.450072] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821448, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.952693] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821448, 'name': ReconfigVM_Task, 'duration_secs': 0.255774} completed successfully. 
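Taken together, the entries around this point record the whole spawn-from-cache sequence: copy the cached VMDK, extend the root disk, reconfigure the VM to attach it, rename, then power on. An illustrative outline of that order only; the helper names are hypothetical, not Nova's actual internals, and each call stands for one vCenter task that the log polls to completion:

# Hypothetical helpers; order mirrors the logged spawn steps for instance
# 55267f9f-66bd-4298-8ac6-19bebe71c05e.
def spawn_from_cached_image(session, instance_uuid, image_id, root_size_kb):
    cached = f'[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk'
    root = f'[datastore1] {instance_uuid}/{instance_uuid}.vmdk'

    copy_virtual_disk(session, cached, root)          # CopyVirtualDisk_Task
    extend_virtual_disk(session, root, root_size_kb)  # ExtendVirtualDisk_Task (1048576 KB above)
    attach_disk_to_vm(session, instance_uuid, root)   # ReconfigVM_Task
    rename_vm(session, instance_uuid)                 # Rename_Task
    power_on_vm(session, instance_uuid)               # PowerOnVM_Task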
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.953173] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 55267f9f-66bd-4298-8ac6-19bebe71c05e/55267f9f-66bd-4298-8ac6-19bebe71c05e.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2148.953604] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-106b6870-a4aa-4c94-98b0-afe6cbf83ebc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.959903] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2148.959903] env[63241]: value = "task-1821449" [ 2148.959903] env[63241]: _type = "Task" [ 2148.959903] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.967013] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821449, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.469886] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821449, 'name': Rename_Task, 'duration_secs': 0.139327} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.470174] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2149.470415] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a94a2dc-978d-4b61-8e1d-adc977a29340 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.476894] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2149.476894] env[63241]: value = "task-1821450" [ 2149.476894] env[63241]: _type = "Task" [ 2149.476894] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.484601] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821450, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.988378] env[63241]: DEBUG oslo_vmware.api [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821450, 'name': PowerOnVM_Task, 'duration_secs': 0.445553} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.988780] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2149.988839] env[63241]: INFO nova.compute.manager [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Took 6.64 seconds to spawn the instance on the hypervisor. [ 2149.988987] env[63241]: DEBUG nova.compute.manager [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2149.989729] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee915b73-e51d-4d55-b845-3e22bf868cfe {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.507029] env[63241]: INFO nova.compute.manager [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Took 11.30 seconds to build instance. [ 2151.009829] env[63241]: DEBUG oslo_concurrency.lockutils [None req-bdec7317-9137-469b-a5a7-da729bb988b5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.812s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2151.148342] env[63241]: DEBUG nova.compute.manager [req-07d22e16-d32d-452a-b8d6-67d2105645d9 req-26fb4c39-9d24-4212-9301-5506d911cf35 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Received event network-changed-3f0a6003-8737-433a-9490-078edacc86c3 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2151.148540] env[63241]: DEBUG nova.compute.manager [req-07d22e16-d32d-452a-b8d6-67d2105645d9 req-26fb4c39-9d24-4212-9301-5506d911cf35 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Refreshing instance network info cache due to event network-changed-3f0a6003-8737-433a-9490-078edacc86c3. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2151.148748] env[63241]: DEBUG oslo_concurrency.lockutils [req-07d22e16-d32d-452a-b8d6-67d2105645d9 req-26fb4c39-9d24-4212-9301-5506d911cf35 service nova] Acquiring lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2151.148889] env[63241]: DEBUG oslo_concurrency.lockutils [req-07d22e16-d32d-452a-b8d6-67d2105645d9 req-26fb4c39-9d24-4212-9301-5506d911cf35 service nova] Acquired lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2151.149253] env[63241]: DEBUG nova.network.neutron [req-07d22e16-d32d-452a-b8d6-67d2105645d9 req-26fb4c39-9d24-4212-9301-5506d911cf35 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Refreshing network info cache for port 3f0a6003-8737-433a-9490-078edacc86c3 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2151.870341] env[63241]: DEBUG nova.network.neutron [req-07d22e16-d32d-452a-b8d6-67d2105645d9 req-26fb4c39-9d24-4212-9301-5506d911cf35 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updated VIF entry in instance network info cache for port 3f0a6003-8737-433a-9490-078edacc86c3. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2151.870705] env[63241]: DEBUG nova.network.neutron [req-07d22e16-d32d-452a-b8d6-67d2105645d9 req-26fb4c39-9d24-4212-9301-5506d911cf35 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updating instance_info_cache with network_info: [{"id": "3f0a6003-8737-433a-9490-078edacc86c3", "address": "fa:16:3e:93:e5:dd", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0a6003-87", "ovs_interfaceid": "3f0a6003-8737-433a-9490-078edacc86c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2152.373771] env[63241]: DEBUG oslo_concurrency.lockutils [req-07d22e16-d32d-452a-b8d6-67d2105645d9 req-26fb4c39-9d24-4212-9301-5506d911cf35 service nova] Releasing lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2174.135413] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2174.135810] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2174.135864] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2174.451579] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.452172] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.452620] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2178.452620] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2178.983847] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2178.983998] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2178.984161] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2178.984317] env[63241]: DEBUG nova.objects.instance [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lazy-loading 'info_cache' on Instance uuid 55267f9f-66bd-4298-8ac6-19bebe71c05e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2180.688524] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updating instance_info_cache with network_info: [{"id": "3f0a6003-8737-433a-9490-078edacc86c3", "address": "fa:16:3e:93:e5:dd", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0a6003-87", "ovs_interfaceid": "3f0a6003-8737-433a-9490-078edacc86c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2181.190759] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2181.191126] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2181.191411] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2181.191649] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2181.191865] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2182.452119] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2184.447112] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2185.451494] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2185.954693] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.954982] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2185.955169] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.955323] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2185.956212] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa0a891-cf2c-42d9-9827-dcc35fd56cc0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.964258] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5aa8390-9545-4e2c-9d2f-b23c7de19136 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.977879] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67c76e5-63b3-41c4-a314-0edbf8d0cf76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.984117] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8975edd-2950-448b-9e31-9a7624ad2cab {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.013686] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181100MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2186.013835] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.013995] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.127621] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "55267f9f-66bd-4298-8ac6-19bebe71c05e" by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.127869] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.128063] env[63241]: DEBUG nova.compute.manager [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2186.128923] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a237e7e-7a53-4ac2-bc59-4ff81022e630 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.136059] env[63241]: DEBUG nova.compute.manager [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2186.136597] env[63241]: DEBUG nova.objects.instance [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'flavor' on Instance uuid 55267f9f-66bd-4298-8ac6-19bebe71c05e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2186.642266] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2186.642714] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b4650e5-7c3b-48d5-8023-cf688e4618d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.649936] env[63241]: DEBUG oslo_vmware.api [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2186.649936] env[63241]: value = "task-1821451" [ 2186.649936] env[63241]: _type = "Task" [ 2186.649936] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.658293] env[63241]: DEBUG oslo_vmware.api [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821451, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.037962] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 55267f9f-66bd-4298-8ac6-19bebe71c05e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2187.038190] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2187.038339] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2187.063444] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e564ff3f-b88c-4fa8-976b-fa64cdb20eeb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.070935] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c8dc9b-e493-4183-bc87-58f4ae186ee6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.099164] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefa50ca-baba-479f-b635-64181c574ffd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.105660] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20db3cde-b68d-447a-96b0-c65ed1b0a02f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.118104] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2187.160353] env[63241]: DEBUG oslo_vmware.api [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821451, 'name': PowerOffVM_Task, 'duration_secs': 0.169956} completed successfully. 
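The used_* figures in the final resource view above are simply the reserved host memory plus the single instance's placement allocation. A quick check of that arithmetic, with the values copied from the log:

# Values copied from the resource-tracker entries above.
reserved_host_memory_mb = 512                               # MEMORY_MB 'reserved' in the inventory
instance_alloc = {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}  # allocation for 55267f9f-...

used_ram_mb = reserved_host_memory_mb + instance_alloc['MEMORY_MB']
used_disk_gb = instance_alloc['DISK_GB']
used_vcpus = instance_alloc['VCPU']

# Matches "used_ram=704MB ... used_disk=1GB ... used_vcpus=1" in the log.
assert (used_ram_mb, used_disk_gb, used_vcpus) == (704, 1, 1)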
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.160594] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2187.160767] env[63241]: DEBUG nova.compute.manager [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2187.161504] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69baf6ee-fa52-4598-9fe5-6c576d4aa97d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.637489] env[63241]: ERROR nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [req-94234c83-c8e6-44a1-80d6-b3d573c0c9d4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-94234c83-c8e6-44a1-80d6-b3d573c0c9d4"}]} [ 2187.652528] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2187.664214] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2187.664387] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2187.671373] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8909fa6e-5b78-4689-88ce-6dedbc096bf7 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.543s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.673943] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2187.692827] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2187.716039] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb11eac6-40c9-48cf-8f99-2ef9e139df06 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.723888] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf004df-3e3c-4944-9469-ba2163bc238e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.753316] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d340e04-4d96-4fe7-8d9e-93408a0d6a23 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.760340] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27da61c7-72fd-42fb-b44e-71fddb1af9cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.773163] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2188.302530] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 189 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2188.302774] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 189 to 190 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2188.302931] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2188.508301] env[63241]: DEBUG nova.objects.instance [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'flavor' on Instance uuid 55267f9f-66bd-4298-8ac6-19bebe71c05e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2188.807503] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2188.807896] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.794s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.012894] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2189.013165] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2189.013430] env[63241]: DEBUG nova.network.neutron [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2189.013688] env[63241]: DEBUG nova.objects.instance [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'info_cache' on Instance uuid 
55267f9f-66bd-4298-8ac6-19bebe71c05e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2189.517609] env[63241]: DEBUG nova.objects.base [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Object Instance<55267f9f-66bd-4298-8ac6-19bebe71c05e> lazy-loaded attributes: flavor,info_cache {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2190.231999] env[63241]: DEBUG nova.network.neutron [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updating instance_info_cache with network_info: [{"id": "3f0a6003-8737-433a-9490-078edacc86c3", "address": "fa:16:3e:93:e5:dd", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0a6003-87", "ovs_interfaceid": "3f0a6003-8737-433a-9490-078edacc86c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2190.734579] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2191.237674] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2191.238114] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-740ccc79-b1c5-4c48-b8f0-ca8b93fcb18e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.246415] env[63241]: DEBUG oslo_vmware.api [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2191.246415] env[63241]: value = "task-1821452" [ 2191.246415] env[63241]: _type = "Task" [ 2191.246415] env[63241]: } to complete. 
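The instance_info_cache payload logged above (and repeated at every refresh) is just a list of VIF dicts, so pulling out the addresses the tempest run cares about is a few nested loops. A self-contained example using an abridged copy of the logged entry:

# One VIF entry, abridged from the instance_info_cache payload logged above.
network_info = [{
    "id": "3f0a6003-8737-433a-9490-078edacc86c3",
    "address": "fa:16:3e:93:e5:dd",
    "network": {"subnets": [{
        "cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.9", "type": "fixed",
                 "floating_ips": [{"address": "10.180.180.160", "type": "floating"}]}],
    }]},
    "devname": "tap3f0a6003-87",
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floats = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["id"], ip["address"], floats)
# -> 3f0a6003-8737-433a-9490-078edacc86c3 192.168.128.9 ['10.180.180.160']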
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.253921] env[63241]: DEBUG oslo_vmware.api [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821452, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.756224] env[63241]: DEBUG oslo_vmware.api [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821452, 'name': PowerOnVM_Task, 'duration_secs': 0.410838} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.756503] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2191.756678] env[63241]: DEBUG nova.compute.manager [None req-f4e93639-30ef-4453-9e0f-f322ee1074f9 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2191.757549] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d625ec72-b534-4726-ae75-bf94426070fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.425136] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff3f690-cf54-4824-83b1-9f710b37b045 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.432343] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0c551cab-ff15-4c6e-aaa2-ec30d5c446dd tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Suspending the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2193.432600] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-bf91ae08-af4f-43ee-8872-738348d86da7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.439259] env[63241]: DEBUG oslo_vmware.api [None req-0c551cab-ff15-4c6e-aaa2-ec30d5c446dd tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2193.439259] env[63241]: value = "task-1821453" [ 2193.439259] env[63241]: _type = "Task" [ 2193.439259] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.448192] env[63241]: DEBUG oslo_vmware.api [None req-0c551cab-ff15-4c6e-aaa2-ec30d5c446dd tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821453, 'name': SuspendVM_Task} progress is 0%. 
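The 409 placement.concurrent_update error logged at 2187.637489, followed by the inventory refresh and the generation bump from 189 to 190, shows the usual generation-conflict handling: re-read the provider's current generation and retry the update. A generic, hedged sketch of that retry shape, not the actual report-client code; both callables are parameters so no real API is assumed:

# Generic retry-on-generation-conflict shape, mirroring the logged sequence:
# PUT inventories -> 409 concurrent_update -> refresh provider -> retry.
class GenerationConflict(Exception):
    pass

def set_inventory_with_retry(fetch_provider, put_inventory, inventory, attempts=3):
    for _ in range(attempts):
        provider = fetch_provider()                  # re-read current generation
        try:
            return put_inventory(provider['generation'], inventory)
        except GenerationConflict:
            continue                                 # another writer won; retry
    raise GenerationConflict('gave up after %d attempts' % attempts)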
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.949390] env[63241]: DEBUG oslo_vmware.api [None req-0c551cab-ff15-4c6e-aaa2-ec30d5c446dd tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821453, 'name': SuspendVM_Task} progress is 70%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.450931] env[63241]: DEBUG oslo_vmware.api [None req-0c551cab-ff15-4c6e-aaa2-ec30d5c446dd tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821453, 'name': SuspendVM_Task} progress is 70%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.951021] env[63241]: DEBUG oslo_vmware.api [None req-0c551cab-ff15-4c6e-aaa2-ec30d5c446dd tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821453, 'name': SuspendVM_Task, 'duration_secs': 1.454225} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.951307] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-0c551cab-ff15-4c6e-aaa2-ec30d5c446dd tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Suspended the VM {{(pid=63241) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2194.951490] env[63241]: DEBUG nova.compute.manager [None req-0c551cab-ff15-4c6e-aaa2-ec30d5c446dd tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2194.952296] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f9dfd5-2799-459b-811e-3c44b42aa20d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.776523] env[63241]: INFO nova.compute.manager [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Resuming [ 2195.777142] env[63241]: DEBUG nova.objects.instance [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'flavor' on Instance uuid 55267f9f-66bd-4298-8ac6-19bebe71c05e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2196.786235] env[63241]: DEBUG oslo_concurrency.lockutils [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2196.786653] env[63241]: DEBUG oslo_concurrency.lockutils [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquired lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
2196.786653] env[63241]: DEBUG nova.network.neutron [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2197.492244] env[63241]: DEBUG nova.network.neutron [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updating instance_info_cache with network_info: [{"id": "3f0a6003-8737-433a-9490-078edacc86c3", "address": "fa:16:3e:93:e5:dd", "network": {"id": "0389d407-c9bd-4546-ae22-6a0fc1ef47d8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-341580819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "855da29218ba4391a208e2835f60ee11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0a6003-87", "ovs_interfaceid": "3f0a6003-8737-433a-9490-078edacc86c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2197.994812] env[63241]: DEBUG oslo_concurrency.lockutils [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Releasing lock "refresh_cache-55267f9f-66bd-4298-8ac6-19bebe71c05e" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2197.995769] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa01d2d-a5af-4312-abaf-74bfd5d7e46d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.002610] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Resuming the VM {{(pid=63241) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2198.002823] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d625eea-2c7c-42de-8947-7fe5973ac795 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.009485] env[63241]: DEBUG oslo_vmware.api [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2198.009485] env[63241]: value = "task-1821454" [ 
2198.009485] env[63241]: _type = "Task" [ 2198.009485] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.016662] env[63241]: DEBUG oslo_vmware.api [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821454, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2198.521150] env[63241]: DEBUG oslo_vmware.api [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821454, 'name': PowerOnVM_Task, 'duration_secs': 0.494956} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2198.521421] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Resumed the VM {{(pid=63241) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2198.521608] env[63241]: DEBUG nova.compute.manager [None req-943bc65e-be28-46a6-a835-485078f61aa5 tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2198.522454] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a1a4eb-b1a0-43da-83ba-304ce2d9cfae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.337875] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "55267f9f-66bd-4298-8ac6-19bebe71c05e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2199.338340] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2199.338400] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "55267f9f-66bd-4298-8ac6-19bebe71c05e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2199.338560] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2199.338735] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2199.340779] env[63241]: INFO nova.compute.manager [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Terminating instance [ 2199.342400] env[63241]: DEBUG nova.compute.manager [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2199.342632] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2199.343473] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505c2f2a-7ffd-42b6-a99c-8b55f19d119e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.351646] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2199.351864] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0f15485-0136-42a6-915a-153764120af4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.357479] env[63241]: DEBUG oslo_vmware.api [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2199.357479] env[63241]: value = "task-1821455" [ 2199.357479] env[63241]: _type = "Task" [ 2199.357479] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.364610] env[63241]: DEBUG oslo_vmware.api [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821455, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.867817] env[63241]: DEBUG oslo_vmware.api [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821455, 'name': PowerOffVM_Task, 'duration_secs': 0.190513} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.868121] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2199.868409] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2199.868742] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f994d214-d099-43be-b2ae-927e89b53234 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.939268] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2199.939478] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2199.939663] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleting the datastore file [datastore1] 55267f9f-66bd-4298-8ac6-19bebe71c05e {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2199.939916] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-042bed14-bd05-490d-a7d1-a302328283c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.945858] env[63241]: DEBUG oslo_vmware.api [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for the task: (returnval){ [ 2199.945858] env[63241]: value = "task-1821457" [ 2199.945858] env[63241]: _type = "Task" [ 2199.945858] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.953281] env[63241]: DEBUG oslo_vmware.api [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.456465] env[63241]: DEBUG oslo_vmware.api [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Task: {'id': task-1821457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143013} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.456465] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2200.456902] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2200.456902] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2200.456902] env[63241]: INFO nova.compute.manager [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2200.457140] env[63241]: DEBUG oslo.service.loopingcall [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2200.457332] env[63241]: DEBUG nova.compute.manager [-] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2200.457429] env[63241]: DEBUG nova.network.neutron [-] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2201.057312] env[63241]: DEBUG nova.compute.manager [req-08ca5ac5-c9d4-414d-a774-823826cbd0eb req-152c4f79-8a91-459f-9b5c-86b29a813db9 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Received event network-vif-deleted-3f0a6003-8737-433a-9490-078edacc86c3 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2201.057312] env[63241]: INFO nova.compute.manager [req-08ca5ac5-c9d4-414d-a774-823826cbd0eb req-152c4f79-8a91-459f-9b5c-86b29a813db9 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Neutron deleted interface 3f0a6003-8737-433a-9490-078edacc86c3; detaching it from the instance and deleting it from the info cache [ 2201.057312] env[63241]: DEBUG nova.network.neutron [req-08ca5ac5-c9d4-414d-a774-823826cbd0eb req-152c4f79-8a91-459f-9b5c-86b29a813db9 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2201.535094] env[63241]: DEBUG nova.network.neutron [-] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2201.558894] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19a89bc9-4361-4f99-89f3-c5c345a08edb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.569100] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f76f9d6-2c3f-4fff-ad0e-ec20b660dfaa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.593371] env[63241]: DEBUG nova.compute.manager [req-08ca5ac5-c9d4-414d-a774-823826cbd0eb req-152c4f79-8a91-459f-9b5c-86b29a813db9 service nova] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Detach interface failed, port_id=3f0a6003-8737-433a-9490-078edacc86c3, reason: Instance 55267f9f-66bd-4298-8ac6-19bebe71c05e could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2202.037075] env[63241]: INFO nova.compute.manager [-] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Took 1.58 seconds to deallocate network for instance. 
[ 2202.543184] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2202.543564] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2202.543670] env[63241]: DEBUG nova.objects.instance [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lazy-loading 'resources' on Instance uuid 55267f9f-66bd-4298-8ac6-19bebe71c05e {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2203.076068] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1fa5a8d-4923-482e-900f-d9d14a46d727 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.083358] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52a8411-4e72-40e0-80f9-95ca0d39e852 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.111924] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77450c39-e326-49d6-86a0-3554de7008f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.118460] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1091453-d78f-49cc-bff8-acbc37851804 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.132278] env[63241]: DEBUG nova.compute.provider_tree [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2203.635500] env[63241]: DEBUG nova.scheduler.client.report [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2204.140398] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a 
tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.597s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.160119] env[63241]: INFO nova.scheduler.client.report [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Deleted allocations for instance 55267f9f-66bd-4298-8ac6-19bebe71c05e [ 2204.669766] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f3559c77-9996-403c-bc8b-3cc85a3bec7a tempest-ServerActionsTestJSON-1637889344 tempest-ServerActionsTestJSON-1637889344-project-member] Lock "55267f9f-66bd-4298-8ac6-19bebe71c05e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.332s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2209.818122] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2209.818449] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.321404] env[63241]: DEBUG nova.compute.manager [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2210.844812] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.845135] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.846846] env[63241]: INFO nova.compute.claims [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2211.883074] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ac99ef-5c12-4e11-bf1f-f62567c8606d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.891012] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fa0ee4-5b4e-4381-a86d-9a78e6567d81 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.920903] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b7876e-87d1-4ae9-9f78-fcd1ae49b269 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.927579] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd8a951-cac1-4054-8d0d-04dd0f34f825 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.940125] env[63241]: DEBUG nova.compute.provider_tree [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2212.442893] env[63241]: DEBUG nova.scheduler.client.report [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2212.948538] env[63241]: DEBUG oslo_concurrency.lockutils 
[None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2212.949035] env[63241]: DEBUG nova.compute.manager [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2213.453964] env[63241]: DEBUG nova.compute.utils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2213.455442] env[63241]: DEBUG nova.compute.manager [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2213.455615] env[63241]: DEBUG nova.network.neutron [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2213.521139] env[63241]: DEBUG nova.policy [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ecf711bb36ca4235920b16674379d0d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39ea2ef9af4742768fc75e7a839b0416', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 2213.772786] env[63241]: DEBUG nova.network.neutron [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Successfully created port: 84a13e6f-bea2-418e-a1cc-ece38c4263d5 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2213.959767] env[63241]: DEBUG nova.compute.manager [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2214.969585] env[63241]: DEBUG nova.compute.manager [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2214.994830] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2214.995094] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2214.995209] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2214.995399] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2214.995588] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2214.995744] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2214.995953] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2214.996127] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2214.996298] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2214.996461] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2214.996644] env[63241]: DEBUG nova.virt.hardware [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2214.997530] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df2f4cd-0cdb-4bb6-8ade-a9d05bb99e6b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.005551] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd63105-1111-4024-85a6-b855fcbd2a14 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.141403] env[63241]: DEBUG nova.compute.manager [req-655db11b-c88e-4902-b23c-8011ca4392de req-a9b49743-ab42-4101-8884-21dbc3977ba5 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Received event network-vif-plugged-84a13e6f-bea2-418e-a1cc-ece38c4263d5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2215.141635] env[63241]: DEBUG oslo_concurrency.lockutils [req-655db11b-c88e-4902-b23c-8011ca4392de req-a9b49743-ab42-4101-8884-21dbc3977ba5 service nova] Acquiring lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2215.141900] env[63241]: DEBUG oslo_concurrency.lockutils [req-655db11b-c88e-4902-b23c-8011ca4392de req-a9b49743-ab42-4101-8884-21dbc3977ba5 service nova] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.142119] env[63241]: DEBUG oslo_concurrency.lockutils [req-655db11b-c88e-4902-b23c-8011ca4392de req-a9b49743-ab42-4101-8884-21dbc3977ba5 service nova] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.142272] env[63241]: 
DEBUG nova.compute.manager [req-655db11b-c88e-4902-b23c-8011ca4392de req-a9b49743-ab42-4101-8884-21dbc3977ba5 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] No waiting events found dispatching network-vif-plugged-84a13e6f-bea2-418e-a1cc-ece38c4263d5 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2215.142423] env[63241]: WARNING nova.compute.manager [req-655db11b-c88e-4902-b23c-8011ca4392de req-a9b49743-ab42-4101-8884-21dbc3977ba5 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Received unexpected event network-vif-plugged-84a13e6f-bea2-418e-a1cc-ece38c4263d5 for instance with vm_state building and task_state spawning. [ 2215.676698] env[63241]: DEBUG nova.network.neutron [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Successfully updated port: 84a13e6f-bea2-418e-a1cc-ece38c4263d5 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2215.698431] env[63241]: DEBUG nova.compute.manager [req-a4045d3e-1778-4ade-8dfe-3d38854126e6 req-3baf717d-cc01-4693-af43-3c115c4d696d service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Received event network-changed-84a13e6f-bea2-418e-a1cc-ece38c4263d5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2215.698551] env[63241]: DEBUG nova.compute.manager [req-a4045d3e-1778-4ade-8dfe-3d38854126e6 req-3baf717d-cc01-4693-af43-3c115c4d696d service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Refreshing instance network info cache due to event network-changed-84a13e6f-bea2-418e-a1cc-ece38c4263d5. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2215.698749] env[63241]: DEBUG oslo_concurrency.lockutils [req-a4045d3e-1778-4ade-8dfe-3d38854126e6 req-3baf717d-cc01-4693-af43-3c115c4d696d service nova] Acquiring lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2215.698895] env[63241]: DEBUG oslo_concurrency.lockutils [req-a4045d3e-1778-4ade-8dfe-3d38854126e6 req-3baf717d-cc01-4693-af43-3c115c4d696d service nova] Acquired lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2215.699070] env[63241]: DEBUG nova.network.neutron [req-a4045d3e-1778-4ade-8dfe-3d38854126e6 req-3baf717d-cc01-4693-af43-3c115c4d696d service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Refreshing network info cache for port 84a13e6f-bea2-418e-a1cc-ece38c4263d5 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2216.179802] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2216.229161] env[63241]: DEBUG nova.network.neutron [req-a4045d3e-1778-4ade-8dfe-3d38854126e6 req-3baf717d-cc01-4693-af43-3c115c4d696d service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2216.296107] env[63241]: DEBUG nova.network.neutron [req-a4045d3e-1778-4ade-8dfe-3d38854126e6 req-3baf717d-cc01-4693-af43-3c115c4d696d service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.798762] env[63241]: DEBUG oslo_concurrency.lockutils [req-a4045d3e-1778-4ade-8dfe-3d38854126e6 req-3baf717d-cc01-4693-af43-3c115c4d696d service nova] Releasing lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2216.799168] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2216.799327] env[63241]: DEBUG nova.network.neutron [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2217.331819] env[63241]: DEBUG nova.network.neutron [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2217.450626] env[63241]: DEBUG nova.network.neutron [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [{"id": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "address": "fa:16:3e:55:ba:26", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84a13e6f-be", "ovs_interfaceid": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2217.953115] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.953450] env[63241]: DEBUG nova.compute.manager [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Instance network_info: |[{"id": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "address": "fa:16:3e:55:ba:26", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84a13e6f-be", "ovs_interfaceid": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2217.953876] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:ba:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84a13e6f-bea2-418e-a1cc-ece38c4263d5', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2217.961541] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating folder: Project (39ea2ef9af4742768fc75e7a839b0416). Parent ref: group-v376927. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2217.961814] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-753acb04-ccb3-46b1-a8b9-a6acf476611a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.032056] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Created folder: Project (39ea2ef9af4742768fc75e7a839b0416) in parent group-v376927. [ 2218.032232] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating folder: Instances. Parent ref: group-v377246. {{(pid=63241) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2218.032457] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8ad385c-5283-42c6-9d0e-8004af0b093d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.041729] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Created folder: Instances in parent group-v377246. [ 2218.041949] env[63241]: DEBUG oslo.service.loopingcall [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2218.042142] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2218.042329] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c81570c-429a-4cad-8e89-133ea390c165 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.059360] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2218.059360] env[63241]: value = "task-1821460" [ 2218.059360] env[63241]: _type = "Task" [ 2218.059360] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.066421] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821460, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.568840] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821460, 'name': CreateVM_Task, 'duration_secs': 0.315662} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.569351] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2218.569684] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.569856] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.570193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2218.570438] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9a1394f-0ed8-48f8-99d1-e0a3875675db {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.574795] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2218.574795] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52e96689-815e-9dea-8fcf-d2373b1dcc48" [ 2218.574795] env[63241]: _type = "Task" [ 2218.574795] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.582011] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e96689-815e-9dea-8fcf-d2373b1dcc48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.084965] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52e96689-815e-9dea-8fcf-d2373b1dcc48, 'name': SearchDatastore_Task, 'duration_secs': 0.010729} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.085285] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2219.085522] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2219.085749] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2219.085904] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2219.086085] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2219.086330] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1744f2d2-ea20-4d17-9bf6-de123b3b2135 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.094079] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2219.094893] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2219.095074] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b7b3f5c-a9dd-421f-ac61-357613972e42 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.099951] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2219.099951] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5291293d-22d0-1e02-dc85-8c5ce4330171" [ 2219.099951] env[63241]: _type = "Task" [ 2219.099951] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.107042] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5291293d-22d0-1e02-dc85-8c5ce4330171, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.610945] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5291293d-22d0-1e02-dc85-8c5ce4330171, 'name': SearchDatastore_Task, 'duration_secs': 0.009268} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.611737] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-889e449d-91a5-4537-9674-54da9476d54e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.616810] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2219.616810] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52a0c80c-b1c4-8faa-fdc1-08429f71e5ac" [ 2219.616810] env[63241]: _type = "Task" [ 2219.616810] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.624772] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a0c80c-b1c4-8faa-fdc1-08429f71e5ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.127054] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52a0c80c-b1c4-8faa-fdc1-08429f71e5ac, 'name': SearchDatastore_Task, 'duration_secs': 0.009508} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.127329] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.127582] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4/0b5dcad5-2877-44bd-b438-a2f88dbc2ef4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2220.127832] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce99c995-8a87-4d9d-a2bf-53d501e50985 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.134467] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2220.134467] env[63241]: value = "task-1821461" [ 2220.134467] env[63241]: _type = "Task" [ 2220.134467] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.141852] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821461, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.644426] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821461, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.414476} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.644861] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4/0b5dcad5-2877-44bd-b438-a2f88dbc2ef4.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2220.644861] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2220.645119] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf508253-6ac1-448d-9356-c138be649075 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.651381] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2220.651381] env[63241]: value = "task-1821462" [ 2220.651381] env[63241]: _type = "Task" [ 2220.651381] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.658341] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.161918] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058015} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.162197] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2221.162950] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f695f090-c3c9-4e8a-994d-8b9e70f17767 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.184165] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4/0b5dcad5-2877-44bd-b438-a2f88dbc2ef4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2221.184449] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc18790b-48f6-4711-9cfc-0e99cd9c6036 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.205166] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2221.205166] env[63241]: value = "task-1821463" [ 2221.205166] env[63241]: _type = "Task" [ 2221.205166] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.212645] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821463, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.714679] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821463, 'name': ReconfigVM_Task, 'duration_secs': 0.25818} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.715111] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4/0b5dcad5-2877-44bd-b438-a2f88dbc2ef4.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2221.715551] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50d4ab13-ec2e-430e-87ca-0d14358fb459 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.721433] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2221.721433] env[63241]: value = "task-1821464" [ 2221.721433] env[63241]: _type = "Task" [ 2221.721433] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.729662] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821464, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.230903] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821464, 'name': Rename_Task, 'duration_secs': 0.136766} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.231199] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2222.231440] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06e54f25-4ccd-4e50-969d-cf1e0df41b9a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.237558] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2222.237558] env[63241]: value = "task-1821465" [ 2222.237558] env[63241]: _type = "Task" [ 2222.237558] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.244551] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821465, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.747821] env[63241]: DEBUG oslo_vmware.api [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821465, 'name': PowerOnVM_Task, 'duration_secs': 0.418268} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.748206] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2222.748284] env[63241]: INFO nova.compute.manager [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Took 7.78 seconds to spawn the instance on the hypervisor. [ 2222.748479] env[63241]: DEBUG nova.compute.manager [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2222.749251] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394e0431-8842-443a-b1fb-61633c99be73 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.269220] env[63241]: INFO nova.compute.manager [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Took 12.44 seconds to build instance. [ 2223.771329] env[63241]: DEBUG oslo_concurrency.lockutils [None req-9ad8f353-062b-4198-b2b1-7d0c0f8a5492 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.953s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2223.901955] env[63241]: DEBUG nova.compute.manager [req-e927ad10-ddff-4da0-ba3c-16321bd238ff req-2ab7f77e-61bf-44bd-9d95-280e4a4438c2 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Received event network-changed-84a13e6f-bea2-418e-a1cc-ece38c4263d5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2223.902177] env[63241]: DEBUG nova.compute.manager [req-e927ad10-ddff-4da0-ba3c-16321bd238ff req-2ab7f77e-61bf-44bd-9d95-280e4a4438c2 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Refreshing instance network info cache due to event network-changed-84a13e6f-bea2-418e-a1cc-ece38c4263d5. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2223.902429] env[63241]: DEBUG oslo_concurrency.lockutils [req-e927ad10-ddff-4da0-ba3c-16321bd238ff req-2ab7f77e-61bf-44bd-9d95-280e4a4438c2 service nova] Acquiring lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2223.902589] env[63241]: DEBUG oslo_concurrency.lockutils [req-e927ad10-ddff-4da0-ba3c-16321bd238ff req-2ab7f77e-61bf-44bd-9d95-280e4a4438c2 service nova] Acquired lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2223.902750] env[63241]: DEBUG nova.network.neutron [req-e927ad10-ddff-4da0-ba3c-16321bd238ff req-2ab7f77e-61bf-44bd-9d95-280e4a4438c2 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Refreshing network info cache for port 84a13e6f-bea2-418e-a1cc-ece38c4263d5 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2224.611474] env[63241]: DEBUG nova.network.neutron [req-e927ad10-ddff-4da0-ba3c-16321bd238ff req-2ab7f77e-61bf-44bd-9d95-280e4a4438c2 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updated VIF entry in instance network info cache for port 84a13e6f-bea2-418e-a1cc-ece38c4263d5. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2224.611828] env[63241]: DEBUG nova.network.neutron [req-e927ad10-ddff-4da0-ba3c-16321bd238ff req-2ab7f77e-61bf-44bd-9d95-280e4a4438c2 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [{"id": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "address": "fa:16:3e:55:ba:26", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84a13e6f-be", "ovs_interfaceid": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2225.114714] env[63241]: DEBUG oslo_concurrency.lockutils [req-e927ad10-ddff-4da0-ba3c-16321bd238ff req-2ab7f77e-61bf-44bd-9d95-280e4a4438c2 service nova] Releasing lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2235.804261] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2235.804691] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2235.804803] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2235.804881] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2238.452447] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2238.452823] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2238.452823] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2239.028110] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2239.028271] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2239.028424] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2239.028589] env[63241]: DEBUG nova.objects.instance [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lazy-loading 'info_cache' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2240.735595] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [{"id": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "address": "fa:16:3e:55:ba:26", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84a13e6f-be", "ovs_interfaceid": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2241.238530] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2241.238695] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2241.238882] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2241.239051] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2241.451431] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2244.451899] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2247.452190] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2247.955377] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.955614] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2247.955767] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2247.955920] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2247.956863] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0667ffd4-b1e0-4afd-bda1-0564720be8e8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.965663] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de570bb2-c0da-4caf-ac00-6569643ac014 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.980286] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7c7eaf-a612-4f71-a8a3-ca292c1bee58 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.986667] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e405df2-10ff-414a-a114-dd3cce1ea1b4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.015103] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181094MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2248.015260] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.015455] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2249.039996] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2249.040272] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2249.040358] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2249.065619] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36164918-7930-437d-a704-58170e750066 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.073089] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615c735a-121f-4627-8534-588216b79242 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.103549] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ae3d2a-28ee-432c-aa9a-99e06d5699a1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.111203] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6589c8a5-1690-454d-b0a4-8fb4398b437b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.124086] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2249.627693] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2250.132968] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2250.133251] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.118s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2262.719116] env[63241]: DEBUG nova.compute.manager [None 
req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2262.720095] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed01c3f8-b5dc-4b12-92f8-0abdb45385fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.230648] env[63241]: INFO nova.compute.manager [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] instance snapshotting [ 2263.231283] env[63241]: DEBUG nova.objects.instance [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'flavor' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2263.737675] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501a8826-dea6-482a-825d-9dcecc408300 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.757662] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0066bb-f00d-444e-bbcc-ec2619eced5e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.268328] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2264.268648] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c0191aec-c80c-4896-9a0b-957a9fb45b15 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.278563] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2264.278563] env[63241]: value = "task-1821466" [ 2264.278563] env[63241]: _type = "Task" [ 2264.278563] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.286290] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821466, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.788875] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821466, 'name': CreateSnapshot_Task, 'duration_secs': 0.442718} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.789285] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2264.789821] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c76ea4-e034-4866-aebb-651e62368624 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.307108] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2265.307421] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ef9657c8-d738-4bb9-9c18-9724c45af8c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.315529] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2265.315529] env[63241]: value = "task-1821467" [ 2265.315529] env[63241]: _type = "Task" [ 2265.315529] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.323351] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821467, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.826388] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821467, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.327499] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821467, 'name': CloneVM_Task, 'duration_secs': 0.935236} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.327752] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Created linked-clone VM from snapshot [ 2266.328484] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c1bd5a-123a-4ba9-87ae-b00bd5d4bc25 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.335556] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Uploading image d509c711-36ea-47c3-9777-2978b4f464d3 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2266.354422] env[63241]: DEBUG oslo_vmware.rw_handles [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2266.354422] env[63241]: value = "vm-377250" [ 2266.354422] env[63241]: _type = "VirtualMachine" [ 2266.354422] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2266.354711] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-323d6a56-9237-4a29-ba04-9727af01c86d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.361367] env[63241]: DEBUG oslo_vmware.rw_handles [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease: (returnval){ [ 2266.361367] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bee302-bf4e-149b-307f-a73f08d600e2" [ 2266.361367] env[63241]: _type = "HttpNfcLease" [ 2266.361367] env[63241]: } obtained for exporting VM: (result){ [ 2266.361367] env[63241]: value = "vm-377250" [ 2266.361367] env[63241]: _type = "VirtualMachine" [ 2266.361367] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2266.361794] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the lease: (returnval){ [ 2266.361794] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bee302-bf4e-149b-307f-a73f08d600e2" [ 2266.361794] env[63241]: _type = "HttpNfcLease" [ 2266.361794] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2266.367403] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2266.367403] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bee302-bf4e-149b-307f-a73f08d600e2" [ 2266.367403] env[63241]: _type = "HttpNfcLease" [ 2266.367403] env[63241]: } is initializing. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2266.869235] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2266.869235] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bee302-bf4e-149b-307f-a73f08d600e2" [ 2266.869235] env[63241]: _type = "HttpNfcLease" [ 2266.869235] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2266.869754] env[63241]: DEBUG oslo_vmware.rw_handles [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2266.869754] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52bee302-bf4e-149b-307f-a73f08d600e2" [ 2266.869754] env[63241]: _type = "HttpNfcLease" [ 2266.869754] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2266.870247] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17737f8e-5525-4c04-acc7-489107968b06 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.877128] env[63241]: DEBUG oslo_vmware.rw_handles [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a9034-68b9-35fc-57a8-42bbbb07d7a7/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2266.877300] env[63241]: DEBUG oslo_vmware.rw_handles [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a9034-68b9-35fc-57a8-42bbbb07d7a7/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2266.975955] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a7084ae5-8b9a-4fab-a534-bb00f247ca5c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.155290] env[63241]: DEBUG oslo_vmware.rw_handles [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a9034-68b9-35fc-57a8-42bbbb07d7a7/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2274.156286] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca16b18-21bf-4771-bbae-7826627f864b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.162531] env[63241]: DEBUG oslo_vmware.rw_handles [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a9034-68b9-35fc-57a8-42bbbb07d7a7/disk-0.vmdk is in state: ready. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2274.162702] env[63241]: ERROR oslo_vmware.rw_handles [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a9034-68b9-35fc-57a8-42bbbb07d7a7/disk-0.vmdk due to incomplete transfer. [ 2274.162914] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-af2def65-d80a-4be9-9866-2126dd2b85ac {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.169820] env[63241]: DEBUG oslo_vmware.rw_handles [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524a9034-68b9-35fc-57a8-42bbbb07d7a7/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2274.170021] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Uploaded image d509c711-36ea-47c3-9777-2978b4f464d3 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2274.172400] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2274.172626] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0313adb7-fb81-4a3a-a1cd-ba35e82f3c7b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.177624] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2274.177624] env[63241]: value = "task-1821469" [ 2274.177624] env[63241]: _type = "Task" [ 2274.177624] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.185504] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821469, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2274.688937] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821469, 'name': Destroy_Task, 'duration_secs': 0.369718} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2274.689161] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Destroyed the VM [ 2274.689395] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2274.689646] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b1019b40-f9bb-49cb-9c90-ccb881908351 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.695878] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2274.695878] env[63241]: value = "task-1821470" [ 2274.695878] env[63241]: _type = "Task" [ 2274.695878] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2274.703038] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821470, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.205926] env[63241]: DEBUG oslo_vmware.api [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821470, 'name': RemoveSnapshot_Task, 'duration_secs': 0.490008} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2275.206329] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2275.206462] env[63241]: INFO nova.compute.manager [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Took 11.47 seconds to snapshot the instance on the hypervisor. 
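[editor's note] The entries from [ 2262.719116] through [ 2275.206462] above trace one complete snapshot cycle for instance 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4: CreateSnapshot_Task on the VM, a linked-clone CloneVM_Task from that snapshot, an HttpNfcLease export of the clone's disk-0.vmdk, the upload of image d509c711-36ea-47c3-9777-2978b4f464d3 to Glance, then Destroy_Task on the clone and RemoveSnapshot_Task on the source VM. Each step is a vCenter task driven through oslo.vmware's wait_for_task/_poll_task loop, which is what produces the repeating "progress is N%" lines. The sketch below is only a minimal illustration of that polling pattern, not the Nova driver's code; the session constructor arguments, credentials, managed object reference and snapshot parameters are assumptions made for the example.

# Hedged sketch of the oslo.vmware task-polling pattern seen in this log.
# Connection details and the moref value are placeholders, not taken from the log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.test',        # vCenter host (assumed)
    'user@vsphere.local',          # user name (assumed)
    'secret',                      # password (assumed)
    api_retry_count=10,
    task_poll_interval=0.5)        # roughly the ~0.5 s poll cadence visible above

# Build a managed object reference for a VM; 'vm-377250' is used purely as an example.
vm_ref = vim_util.get_moref('vm-377250', 'VirtualMachine')

# Start a snapshot task on the VM, analogous to the CreateSnapshot_Task entries above.
task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                          name='snapshot', description='',
                          memory=False, quiesce=False)

# wait_for_task re-polls the task until it completes (the "progress is N%" lines
# come from this loop) and returns the task info, raising on failure.
task_info = session.wait_for_task(task)

The same pattern repeats in the entries above for CloneVM_Task, Destroy_Task and RemoveSnapshot_Task; only the invoked vSphere method and its arguments change.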
[ 2275.746015] env[63241]: DEBUG nova.compute.manager [None req-86cfa001-157e-4bb4-bf3e-9385ea7a33df tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Found 1 images (rotation: 2) {{(pid=63241) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 2276.490113] env[63241]: DEBUG nova.compute.manager [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2276.491049] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3450e84b-2995-449f-a603-508dd01a41cb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.002340] env[63241]: INFO nova.compute.manager [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] instance snapshotting [ 2277.003131] env[63241]: DEBUG nova.objects.instance [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'flavor' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2277.509481] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a0091e-ee0d-4f81-9bb2-89ff08dde375 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.527972] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030b17b0-cbfe-4d12-9cb9-a32be6fa8ea1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.038023] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2278.038376] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d86a990e-146f-4c88-a228-64779b387259 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.046029] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2278.046029] env[63241]: value = "task-1821471" [ 2278.046029] env[63241]: _type = "Task" [ 2278.046029] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2278.055368] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821471, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.556478] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821471, 'name': CreateSnapshot_Task, 'duration_secs': 0.488317} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2278.556868] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2278.557466] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2a2fc7-37cc-4fd1-9cd6-a9348dea82d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.074748] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2279.075087] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a9a9dfeb-67a5-4252-af72-6f7b6ee54585 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.084321] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2279.084321] env[63241]: value = "task-1821472" [ 2279.084321] env[63241]: _type = "Task" [ 2279.084321] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.092504] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821472, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.595111] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821472, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.095390] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821472, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2280.596343] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821472, 'name': CloneVM_Task, 'duration_secs': 1.057596} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2280.596730] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Created linked-clone VM from snapshot [ 2280.597363] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800f563b-03ca-4e6c-a23d-583b20f72a7e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.604385] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Uploading image 0a3e29ad-d9ee-4a80-8706-b17d79ff9807 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2280.624184] env[63241]: DEBUG oslo_vmware.rw_handles [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2280.624184] env[63241]: value = "vm-377252" [ 2280.624184] env[63241]: _type = "VirtualMachine" [ 2280.624184] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2280.624412] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a759beda-033b-4f30-918f-405b42b73ad6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2280.630280] env[63241]: DEBUG oslo_vmware.rw_handles [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease: (returnval){ [ 2280.630280] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276ca94-323c-7b7f-232e-cd88511e0f03" [ 2280.630280] env[63241]: _type = "HttpNfcLease" [ 2280.630280] env[63241]: } obtained for exporting VM: (result){ [ 2280.630280] env[63241]: value = "vm-377252" [ 2280.630280] env[63241]: _type = "VirtualMachine" [ 2280.630280] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2280.630674] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the lease: (returnval){ [ 2280.630674] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276ca94-323c-7b7f-232e-cd88511e0f03" [ 2280.630674] env[63241]: _type = "HttpNfcLease" [ 2280.630674] env[63241]: } to be ready. 
{{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2280.636629] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2280.636629] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276ca94-323c-7b7f-232e-cd88511e0f03" [ 2280.636629] env[63241]: _type = "HttpNfcLease" [ 2280.636629] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2281.139226] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2281.139226] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276ca94-323c-7b7f-232e-cd88511e0f03" [ 2281.139226] env[63241]: _type = "HttpNfcLease" [ 2281.139226] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2281.139540] env[63241]: DEBUG oslo_vmware.rw_handles [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2281.139540] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5276ca94-323c-7b7f-232e-cd88511e0f03" [ 2281.139540] env[63241]: _type = "HttpNfcLease" [ 2281.139540] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2281.140209] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c39980-0c1c-40f5-80aa-44d9aeb09155 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.146838] env[63241]: DEBUG oslo_vmware.rw_handles [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520aec66-59a8-fdce-55ce-1530ba5a35e9/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2281.147109] env[63241]: DEBUG oslo_vmware.rw_handles [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520aec66-59a8-fdce-55ce-1530ba5a35e9/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2281.230269] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5f9d4f96-2ced-4c1a-961d-3984dd64dbb0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.400704] env[63241]: DEBUG oslo_vmware.rw_handles [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520aec66-59a8-fdce-55ce-1530ba5a35e9/disk-0.vmdk. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2288.401630] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2351067e-dfe0-47d0-8ab5-23cbab8debd1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.407810] env[63241]: DEBUG oslo_vmware.rw_handles [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520aec66-59a8-fdce-55ce-1530ba5a35e9/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2288.407980] env[63241]: ERROR oslo_vmware.rw_handles [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520aec66-59a8-fdce-55ce-1530ba5a35e9/disk-0.vmdk due to incomplete transfer. [ 2288.408207] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2e26ce4a-e300-4e0b-aa1e-a9ca9170f748 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.415379] env[63241]: DEBUG oslo_vmware.rw_handles [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520aec66-59a8-fdce-55ce-1530ba5a35e9/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2288.415567] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Uploaded image 0a3e29ad-d9ee-4a80-8706-b17d79ff9807 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2288.417853] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2288.418086] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-83ba52a4-013d-4474-9476-539d64e65663 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.423333] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2288.423333] env[63241]: value = "task-1821474" [ 2288.423333] env[63241]: _type = "Task" [ 2288.423333] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.430351] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821474, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.934302] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821474, 'name': Destroy_Task, 'duration_secs': 0.329164} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2288.934616] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Destroyed the VM [ 2288.934898] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2288.935189] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0d9172d1-47ab-4fa4-b437-cc021b2bc7be {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.940969] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2288.940969] env[63241]: value = "task-1821475" [ 2288.940969] env[63241]: _type = "Task" [ 2288.940969] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.949357] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821475, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.450373] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821475, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.952156] env[63241]: DEBUG oslo_vmware.api [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821475, 'name': RemoveSnapshot_Task, 'duration_secs': 0.538497} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.952460] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2289.952659] env[63241]: INFO nova.compute.manager [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Took 12.44 seconds to snapshot the instance on the hypervisor. [ 2290.510376] env[63241]: DEBUG nova.compute.manager [None req-64d44d8e-9221-499d-ab99-395a18b43bf0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Found 2 images (rotation: 2) {{(pid=63241) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 2291.309789] env[63241]: DEBUG nova.compute.manager [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2291.310775] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b108f8c-ea78-4b36-8ae7-a357f4e6f2ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.821262] env[63241]: INFO nova.compute.manager [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] instance snapshotting [ 2291.821962] env[63241]: DEBUG nova.objects.instance [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'flavor' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2292.327995] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888a34cf-65b4-4e7e-bd63-1b3485683ed0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.346847] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c01c17-7670-40e6-a4bd-71e9f4684d15 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.857148] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2292.857502] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8f97e610-09ab-4b76-8152-9a3b509883c9 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.865245] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2292.865245] env[63241]: value = "task-1821476" [ 2292.865245] env[63241]: _type = "Task" [ 2292.865245] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.875020] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821476, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.375286] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821476, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.876430] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821476, 'name': CreateSnapshot_Task, 'duration_secs': 0.515858} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2293.876829] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2293.877451] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5828b43a-d2d9-467f-951e-8d29779d30b3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.395769] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2294.396027] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fa220fdf-62ad-4fd0-9d34-9250c579794c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.405491] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2294.405491] env[63241]: value = "task-1821477" [ 2294.405491] env[63241]: _type = "Task" [ 2294.405491] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2294.413767] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821477, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.917091] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821477, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2295.128467] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2295.128785] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2295.128922] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2295.417396] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821477, 'name': CloneVM_Task, 'duration_secs': 0.901132} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2295.417646] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Created linked-clone VM from snapshot [ 2295.418386] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed8637b-078d-4829-98c8-67eed6dcebeb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.425224] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Uploading image f2935e3a-204b-4f69-abf7-bb8a73e792b6 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2295.449133] env[63241]: DEBUG oslo_vmware.rw_handles [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2295.449133] env[63241]: value = "vm-377254" [ 2295.449133] env[63241]: _type = "VirtualMachine" [ 2295.449133] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2295.449389] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1db475aa-de25-4cb6-9004-9a1fff132e05 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.451200] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2295.455835] env[63241]: DEBUG oslo_vmware.rw_handles [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease: (returnval){ [ 2295.455835] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52aa062e-a913-0cae-1cb8-b722239e25e8" [ 2295.455835] env[63241]: _type = "HttpNfcLease" [ 2295.455835] env[63241]: } obtained for exporting VM: (result){ [ 2295.455835] env[63241]: value = "vm-377254" [ 2295.455835] env[63241]: _type = "VirtualMachine" [ 2295.455835] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2295.456302] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the lease: (returnval){ [ 2295.456302] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52aa062e-a913-0cae-1cb8-b722239e25e8" [ 2295.456302] env[63241]: _type = "HttpNfcLease" [ 2295.456302] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2295.461874] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2295.461874] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52aa062e-a913-0cae-1cb8-b722239e25e8" [ 2295.461874] env[63241]: _type = "HttpNfcLease" [ 2295.461874] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2295.965088] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2295.965088] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52aa062e-a913-0cae-1cb8-b722239e25e8" [ 2295.965088] env[63241]: _type = "HttpNfcLease" [ 2295.965088] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2295.965664] env[63241]: DEBUG oslo_vmware.rw_handles [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2295.965664] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52aa062e-a913-0cae-1cb8-b722239e25e8" [ 2295.965664] env[63241]: _type = "HttpNfcLease" [ 2295.965664] env[63241]: }. 
{{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2295.966247] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bc8a1e-3cdf-4243-9d16-2bd9f0905eb1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.974198] env[63241]: DEBUG oslo_vmware.rw_handles [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ab465a-7f1e-add8-4ef4-42746a634dac/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2295.974371] env[63241]: DEBUG oslo_vmware.rw_handles [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ab465a-7f1e-add8-4ef4-42746a634dac/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2296.063027] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-757a667a-57b0-45f2-b7fa-7335d0436ec3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.451671] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2299.452111] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2299.452111] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2299.956025] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2299.956256] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2299.956256] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2299.956388] env[63241]: DEBUG nova.objects.instance [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lazy-loading 'info_cache' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2301.695368] env[63241]: DEBUG nova.network.neutron [None 
req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [{"id": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "address": "fa:16:3e:55:ba:26", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84a13e6f-be", "ovs_interfaceid": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2302.198766] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2302.199020] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2302.199239] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2302.199403] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2302.199550] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2303.310578] env[63241]: DEBUG oslo_vmware.rw_handles [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ab465a-7f1e-add8-4ef4-42746a634dac/disk-0.vmdk. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2303.311408] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a099ecb-5f4b-43e8-9958-7c7b60825c06 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.317649] env[63241]: DEBUG oslo_vmware.rw_handles [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ab465a-7f1e-add8-4ef4-42746a634dac/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2303.317814] env[63241]: ERROR oslo_vmware.rw_handles [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ab465a-7f1e-add8-4ef4-42746a634dac/disk-0.vmdk due to incomplete transfer. [ 2303.318043] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a59b2343-62b0-48ca-9a47-37d940f1f72e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.325011] env[63241]: DEBUG oslo_vmware.rw_handles [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ab465a-7f1e-add8-4ef4-42746a634dac/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2303.325223] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Uploaded image f2935e3a-204b-4f69-abf7-bb8a73e792b6 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2303.327498] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2303.327725] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-40e7d3ac-44d8-4d93-bcd2-e168a3d10a47 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.332883] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2303.332883] env[63241]: value = "task-1821479" [ 2303.332883] env[63241]: _type = "Task" [ 2303.332883] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.340121] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821479, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.842216] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821479, 'name': Destroy_Task, 'duration_secs': 0.355147} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2303.842516] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Destroyed the VM [ 2303.842752] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2303.842989] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-769543ea-1691-4631-ac07-af4ee1b36925 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.848967] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2303.848967] env[63241]: value = "task-1821480" [ 2303.848967] env[63241]: _type = "Task" [ 2303.848967] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.856118] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821480, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.359408] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821480, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.452569] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2304.860475] env[63241]: DEBUG oslo_vmware.api [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821480, 'name': RemoveSnapshot_Task, 'duration_secs': 0.929047} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.860722] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2304.860950] env[63241]: INFO nova.compute.manager [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Took 12.53 seconds to snapshot the instance on the hypervisor. [ 2305.405989] env[63241]: DEBUG nova.compute.manager [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Found 3 images (rotation: 2) {{(pid=63241) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 2305.406273] env[63241]: DEBUG nova.compute.manager [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Rotating out 1 backups {{(pid=63241) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 2305.406380] env[63241]: DEBUG nova.compute.manager [None req-e7a25972-7e70-411d-ad11-34896c61402d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deleting image d509c711-36ea-47c3-9777-2978b4f464d3 {{(pid=63241) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 2305.949190] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2306.454702] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2306.454702] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2306.454702] env[63241]: DEBUG nova.compute.manager [None 
req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2306.960716] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] There are 10 instances to clean {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2306.960941] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 55267f9f-66bd-4298-8ac6-19bebe71c05e] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2307.008097] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2307.008333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2307.008493] env[63241]: DEBUG nova.compute.manager [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2307.009382] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98147643-5726-4d20-b684-9a4f9c1b5488 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.016705] env[63241]: DEBUG nova.compute.manager [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63241) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2307.017245] env[63241]: DEBUG nova.objects.instance [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'flavor' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2307.464687] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: db514556-34d3-4a55-97e6-69b848e8f2ed] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2307.521127] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 
0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2307.521409] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3a4b9b2-ad8d-405a-80f2-a2b86ca2becb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.529837] env[63241]: DEBUG oslo_vmware.api [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2307.529837] env[63241]: value = "task-1821481" [ 2307.529837] env[63241]: _type = "Task" [ 2307.529837] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2307.538236] env[63241]: DEBUG oslo_vmware.api [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821481, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2307.968714] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 642c896e-64f8-499c-8498-6ad756de8b70] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2308.039642] env[63241]: DEBUG oslo_vmware.api [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821481, 'name': PowerOffVM_Task, 'duration_secs': 0.171905} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.039919] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2308.040081] env[63241]: DEBUG nova.compute.manager [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2308.040871] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea272c3-30b9-4b54-8452-3de059d68a72 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.472306] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e2aea319-280e-4dc8-9c90-f080cdf2a08a] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2308.551998] env[63241]: DEBUG oslo_concurrency.lockutils [None req-ab092be1-5618-4727-b3d9-2e6f200673d5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.544s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2308.976728] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 8a4e3fe7-987a-4d9d-b154-8a7ca9b20b59] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2309.482019] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 43684f7f-0a5d-48e5-8ab6-573db8d81ff0] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2309.985293] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 215f658f-2af6-4525-b94c-489ad794e6f7] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2310.149960] env[63241]: DEBUG nova.compute.manager [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Stashing vm_state: stopped {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2310.489996] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e4514260-dfcc-45a3-80d5-b5484b0b599c] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2310.670178] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.670443] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.993562] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: e28ba013-0bc5-4edc-858d-674980bc8742] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2311.175025] env[63241]: INFO nova.compute.claims [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2311.496799] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 31e84206-e583-4610-969e-2ccae2d0b206] Instance has had 0 of 5 cleanup attempts {{(pid=63241) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2311.681473] env[63241]: INFO nova.compute.resource_tracker [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating resource usage from migration cc83bcd7-e9a7-4039-bbc4-3328b3e2141c [ 2311.718358] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51cf1b6f-2fdc-4dee-8a1a-0a16ec41e3a6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.726186] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccfa872-e717-4b7f-90ac-aaa1ef1751a8 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.756042] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4eb80c6-5f02-4182-984f-79cc8d4bb5f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.762891] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc38a2d-7386-4b6f-b826-0d1475258250 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.445013] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2312.445201] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Cleaning up deleted instances with incomplete migration {{(pid=63241) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2312.456398] env[63241]: DEBUG 
nova.compute.provider_tree [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2312.959217] env[63241]: DEBUG nova.scheduler.client.report [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2313.263022] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2313.464241] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.794s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.464502] env[63241]: INFO nova.compute.manager [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Migrating [ 2313.766404] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2313.766700] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2313.766807] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.766966] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2313.767877] env[63241]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4828c4a2-5f36-40c9-82f7-35a2fddccd21 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.776756] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc92eec-71fa-475e-bfd6-951105f1325a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.790738] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91444219-0930-4c37-9302-139389a3d4bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.797546] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2787800-f7ca-41f1-a722-a64d22e8b5f9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.827529] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180937MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2313.827721] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2313.827854] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2313.978981] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2313.979483] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2313.979483] env[63241]: DEBUG nova.network.neutron [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2314.783137] env[63241]: DEBUG nova.network.neutron [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [{"id": 
"84a13e6f-bea2-418e-a1cc-ece38c4263d5", "address": "fa:16:3e:55:ba:26", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84a13e6f-be", "ovs_interfaceid": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2314.834723] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Applying migration context for instance 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 as it has an incoming, in-progress migration cc83bcd7-e9a7-4039-bbc4-3328b3e2141c. Migration status is pre-migrating {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2314.835293] env[63241]: INFO nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating resource usage from migration cc83bcd7-e9a7-4039-bbc4-3328b3e2141c [ 2314.850241] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Migration cc83bcd7-e9a7-4039-bbc4-3328b3e2141c is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2314.850393] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2314.850550] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2314.850681] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=960MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2314.888089] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8527c551-63c2-46a6-a7eb-aa46e9a21dae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.896012] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a92ac3a-bc2d-4b52-a97f-40c3ddc1f513 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.926679] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca92c192-e624-4283-bdb9-811f3f5ae647 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.934902] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5eee872-1258-4712-b223-eb3eb5ee4d82 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.950293] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2315.286418] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2315.453480] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2315.958646] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2315.958834] env[63241]: DEBUG 
oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.131s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.958984] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2315.959372] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Getting list of instances from cluster (obj){ [ 2315.959372] env[63241]: value = "domain-c8" [ 2315.959372] env[63241]: _type = "ClusterComputeResource" [ 2315.959372] env[63241]: } {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2315.960422] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e87bc1-59c1-4365-8521-d6c633fe7be1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.970779] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Got total of 1 instances {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2316.801858] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e8bd24-d571-4d2d-84fd-73bd4e6202d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.821496] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance '0b5dcad5-2877-44bd-b438-a2f88dbc2ef4' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2317.328343] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2317.328639] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d204aeb0-68ff-4e6c-ad0b-1fcfbff4a77d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.335904] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2317.335904] env[63241]: value = "task-1821482" [ 2317.335904] env[63241]: _type = "Task" [ 2317.335904] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.343273] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821482, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.848761] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2317.848761] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance '0b5dcad5-2877-44bd-b438-a2f88dbc2ef4' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2318.353461] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2318.353755] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2318.353899] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2318.354135] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2318.354294] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2318.354445] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2318.354650] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2318.354823] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2318.355044] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2318.355219] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2318.355396] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2318.360506] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-395a7c85-49d2-4ab8-b090-6d874d7eb35a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.376186] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2318.376186] env[63241]: value = "task-1821483" [ 2318.376186] env[63241]: _type = "Task" [ 2318.376186] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2318.384837] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821483, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.886211] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821483, 'name': ReconfigVM_Task, 'duration_secs': 0.140468} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2318.886686] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance '0b5dcad5-2877-44bd-b438-a2f88dbc2ef4' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2319.392591] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2319.392834] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2319.392971] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2319.393177] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2319.393334] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2319.393478] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2319.393681] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2319.393879] env[63241]: DEBUG nova.virt.hardware [None 
req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2319.394113] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2319.394287] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2319.394457] env[63241]: DEBUG nova.virt.hardware [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2319.399763] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2319.400050] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc2df261-1a95-4847-b82c-ca6290fc177b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.418230] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2319.418230] env[63241]: value = "task-1821484" [ 2319.418230] env[63241]: _type = "Task" [ 2319.418230] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2319.425772] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821484, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2319.928806] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821484, 'name': ReconfigVM_Task, 'duration_secs': 0.15867} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2319.929254] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2319.929818] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b690fda-b5e5-4a8a-95a5-eb69d3b8d7aa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.954279] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4/0b5dcad5-2877-44bd-b438-a2f88dbc2ef4.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2319.954819] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e749cfb2-d17b-4336-91bb-16098cefb7e9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.971640] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2319.971640] env[63241]: value = "task-1821485" [ 2319.971640] env[63241]: _type = "Task" [ 2319.971640] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2319.978804] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821485, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2320.481716] env[63241]: DEBUG oslo_vmware.api [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821485, 'name': ReconfigVM_Task, 'duration_secs': 0.246818} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2320.482016] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4/0b5dcad5-2877-44bd-b438-a2f88dbc2ef4.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2320.482290] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance '0b5dcad5-2877-44bd-b438-a2f88dbc2ef4' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2320.989139] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a0f19e-0478-4f19-b22e-df3f53e844f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.007769] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e460f9-5fbc-4054-b5d9-9875ef4aa63a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.025368] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance '0b5dcad5-2877-44bd-b438-a2f88dbc2ef4' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2321.572111] env[63241]: DEBUG nova.network.neutron [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Port 84a13e6f-bea2-418e-a1cc-ece38c4263d5 binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2322.593675] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2322.594154] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2322.594154] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2323.628210] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2323.628470] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2323.628568] env[63241]: DEBUG nova.network.neutron [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2324.324843] env[63241]: DEBUG nova.network.neutron [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [{"id": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "address": "fa:16:3e:55:ba:26", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84a13e6f-be", "ovs_interfaceid": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2324.827694] env[63241]: DEBUG oslo_concurrency.lockutils [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2325.354766] env[63241]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a4225a-b25a-4e81-a3e8-d8a4e707ee2b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.374560] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a34f29c-b3f4-4e55-ba82-afff4ba7033b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.381459] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance '0b5dcad5-2877-44bd-b438-a2f88dbc2ef4' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2325.888278] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-94beb3f2-8b5a-49fe-afa2-6c8723ae822d tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance '0b5dcad5-2877-44bd-b438-a2f88dbc2ef4' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2328.381066] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.381427] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.381542] env[63241]: DEBUG nova.compute.manager [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Going to confirm migration 9 {{(pid=63241) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2328.917193] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2328.917384] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2328.917561] env[63241]: DEBUG nova.network.neutron [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2328.917742] env[63241]: DEBUG nova.objects.instance [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'info_cache' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2330.135208] env[63241]: DEBUG nova.network.neutron [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [{"id": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "address": "fa:16:3e:55:ba:26", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84a13e6f-be", "ovs_interfaceid": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2330.638444] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.638742] env[63241]: DEBUG nova.objects.instance [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'migration_context' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2331.141702] env[63241]: DEBUG nova.objects.base [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Object Instance<0b5dcad5-2877-44bd-b438-a2f88dbc2ef4> lazy-loaded attributes: info_cache,migration_context {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2331.142713] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31df6b87-8abc-432b-b68d-1e74ac2cc9b6 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.161508] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe82167c-41c6-48d4-a29c-902687e6dc9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.167011] env[63241]: DEBUG oslo_vmware.api [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2331.167011] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52054179-2d7d-3862-8a24-fa899220aaf5" [ 2331.167011] env[63241]: _type = "Task" [ 2331.167011] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.174299] env[63241]: DEBUG oslo_vmware.api [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52054179-2d7d-3862-8a24-fa899220aaf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.677575] env[63241]: DEBUG oslo_vmware.api [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52054179-2d7d-3862-8a24-fa899220aaf5, 'name': SearchDatastore_Task, 'duration_secs': 0.009386} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.677843] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2331.678085] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.216942] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1543ef8e-1821-4817-b56a-28de1e0b3213 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.224655] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4b3aed-26ae-4060-ba57-8246efc62e85 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.253106] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4551e7-2d96-4ecd-a44c-9389699c1590 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.259826] env[63241]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f8075d-2c95-402d-8b33-7ef04277bc5f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.273365] env[63241]: DEBUG nova.compute.provider_tree [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2332.776785] env[63241]: DEBUG nova.scheduler.client.report [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2333.787388] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.109s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.787891] env[63241]: DEBUG nova.compute.manager [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=63241) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 2334.345797] env[63241]: INFO nova.scheduler.client.report [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted allocation for migration cc83bcd7-e9a7-4039-bbc4-3328b3e2141c [ 2334.852758] env[63241]: DEBUG oslo_concurrency.lockutils [None req-7f593859-7cec-4acd-a1f7-4eb1dcdfe4ca tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.471s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.234645] env[63241]: DEBUG nova.objects.instance [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'flavor' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2335.739513] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2335.739671] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2335.739852] env[63241]: DEBUG nova.network.neutron [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2335.740109] env[63241]: DEBUG nova.objects.instance [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'info_cache' on Instance uuid 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2336.243446] env[63241]: DEBUG nova.objects.base [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Object Instance<0b5dcad5-2877-44bd-b438-a2f88dbc2ef4> lazy-loaded attributes: flavor,info_cache {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2336.976276] env[63241]: DEBUG nova.network.neutron [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [{"id": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "address": "fa:16:3e:55:ba:26", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84a13e6f-be", "ovs_interfaceid": "84a13e6f-bea2-418e-a1cc-ece38c4263d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2337.479714] env[63241]: DEBUG oslo_concurrency.lockutils [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2337.983495] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2337.983880] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0841e36-065c-48de-a38f-33c1e11de8cd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.992761] env[63241]: DEBUG oslo_vmware.api [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2337.992761] env[63241]: value = "task-1821486" [ 2337.992761] env[63241]: _type = "Task" [ 2337.992761] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.000367] env[63241]: DEBUG oslo_vmware.api [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821486, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2338.503264] env[63241]: DEBUG oslo_vmware.api [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821486, 'name': PowerOnVM_Task, 'duration_secs': 0.345748} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2338.503716] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2338.503716] env[63241]: DEBUG nova.compute.manager [None req-6646281a-11f4-47c6-9f24-eefa0a2717c4 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2338.504432] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e157a03-2664-4867-ae32-622c92f62b8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.787916] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2339.788357] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2339.788424] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2339.788564] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2339.788739] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2339.790917] env[63241]: INFO nova.compute.manager [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Terminating instance [ 2339.792731] env[63241]: DEBUG nova.compute.manager [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2339.792935] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2339.793788] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197b4679-3456-4396-a8a0-e1155e7284d2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.801778] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2339.801989] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1eaa97d8-2a51-4903-99da-8279cfd19aa5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.808730] env[63241]: DEBUG oslo_vmware.api [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2339.808730] env[63241]: value = "task-1821487" [ 2339.808730] env[63241]: _type = "Task" [ 2339.808730] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2339.816274] env[63241]: DEBUG oslo_vmware.api [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821487, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2340.318491] env[63241]: DEBUG oslo_vmware.api [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821487, 'name': PowerOffVM_Task, 'duration_secs': 0.148578} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2340.318751] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2340.318926] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2340.319183] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e85d4631-ab08-477a-b708-117092450e9e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.393157] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2340.393303] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2340.393495] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleting the datastore file [datastore1] 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2340.393795] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fb05cd4-e8c2-45b7-93fe-731d188a0228 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.400030] env[63241]: DEBUG oslo_vmware.api [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2340.400030] env[63241]: value = "task-1821489" [ 2340.400030] env[63241]: _type = "Task" [ 2340.400030] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2340.407273] env[63241]: DEBUG oslo_vmware.api [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821489, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2340.910378] env[63241]: DEBUG oslo_vmware.api [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821489, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138867} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2340.910732] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2340.910937] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2340.911196] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2340.911413] env[63241]: INFO nova.compute.manager [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2340.911694] env[63241]: DEBUG oslo.service.loopingcall [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2340.911911] env[63241]: DEBUG nova.compute.manager [-] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2340.912047] env[63241]: DEBUG nova.network.neutron [-] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2341.355695] env[63241]: DEBUG nova.compute.manager [req-9d57bb00-4515-4c5d-8878-e4fe09a4fea0 req-d35ea8dd-0664-4557-be49-524e7ce860b8 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Received event network-vif-deleted-84a13e6f-bea2-418e-a1cc-ece38c4263d5 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2341.355953] env[63241]: INFO nova.compute.manager [req-9d57bb00-4515-4c5d-8878-e4fe09a4fea0 req-d35ea8dd-0664-4557-be49-524e7ce860b8 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Neutron deleted interface 84a13e6f-bea2-418e-a1cc-ece38c4263d5; detaching it from the instance and deleting it from the info cache [ 2341.356155] env[63241]: DEBUG nova.network.neutron [req-9d57bb00-4515-4c5d-8878-e4fe09a4fea0 req-d35ea8dd-0664-4557-be49-524e7ce860b8 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2341.804780] env[63241]: DEBUG nova.network.neutron [-] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2341.858926] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49eb5fcd-d6a2-4be8-8c8d-32b43dc8bf30 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.868597] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08706d5-9bd8-458d-94e7-a088bb81770d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.891169] env[63241]: DEBUG nova.compute.manager [req-9d57bb00-4515-4c5d-8878-e4fe09a4fea0 req-d35ea8dd-0664-4557-be49-524e7ce860b8 service nova] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Detach interface failed, port_id=84a13e6f-bea2-418e-a1cc-ece38c4263d5, reason: Instance 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2342.309518] env[63241]: INFO nova.compute.manager [-] [instance: 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4] Took 1.40 seconds to deallocate network for instance. 
[ 2342.815918] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.816162] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2342.816358] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2342.838146] env[63241]: INFO nova.scheduler.client.report [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted allocations for instance 0b5dcad5-2877-44bd-b438-a2f88dbc2ef4 [ 2343.345875] env[63241]: DEBUG oslo_concurrency.lockutils [None req-5e537582-52ba-48a4-81e6-259978c93821 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "0b5dcad5-2877-44bd-b438-a2f88dbc2ef4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.558s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2344.595764] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.596088] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2345.100057] env[63241]: DEBUG nova.compute.manager [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2345.621168] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2345.621444] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2345.622953] env[63241]: INFO nova.compute.claims [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2346.658855] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe8c726-7bb0-4b61-8971-17b03f589823 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.666202] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d919d25f-3fe2-49e3-9e62-f2d49476e3f5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.695128] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a2f85a-6658-489c-8b0c-785ce1010a7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.702019] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc589324-d070-40ec-88e2-cb1f4aa62a1b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.714513] env[63241]: DEBUG nova.compute.provider_tree [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2347.235192] env[63241]: ERROR nova.scheduler.client.report [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [req-98f37aa9-541e-4e64-9599-d9f23ed09f35] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-98f37aa9-541e-4e64-9599-d9f23ed09f35"}]} [ 2347.250527] env[63241]: DEBUG nova.scheduler.client.report [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2347.264576] env[63241]: DEBUG nova.scheduler.client.report [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2347.264808] env[63241]: DEBUG nova.compute.provider_tree [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2347.274669] env[63241]: DEBUG nova.scheduler.client.report [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2347.290746] env[63241]: DEBUG nova.scheduler.client.report [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2347.312738] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b9a158-d8f3-427d-89de-80d98f99385e {{(pid=63241) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.320284] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b947822-1f0e-4d1c-a9ed-4263d61cec8a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.350203] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6849ce38-55b0-422a-9e2e-4bfacfdfe806 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.357320] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9797d91c-7797-4fb7-b94b-f3a5b6642966 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.370063] env[63241]: DEBUG nova.compute.provider_tree [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2347.898622] env[63241]: DEBUG nova.scheduler.client.report [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 195 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2347.898981] env[63241]: DEBUG nova.compute.provider_tree [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 195 to 196 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2347.899121] env[63241]: DEBUG nova.compute.provider_tree [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2348.404565] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.783s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2348.405112] env[63241]: DEBUG nova.compute.manager [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2348.910712] env[63241]: DEBUG nova.compute.utils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2348.912171] env[63241]: DEBUG nova.compute.manager [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2348.912345] env[63241]: DEBUG nova.network.neutron [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2348.968526] env[63241]: DEBUG nova.policy [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ecf711bb36ca4235920b16674379d0d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39ea2ef9af4742768fc75e7a839b0416', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 2349.240137] env[63241]: DEBUG nova.network.neutron [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Successfully created port: 790566e9-9e07-4ae3-ab15-d1ef783fe49e {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2349.415150] env[63241]: DEBUG nova.compute.manager [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2350.425528] env[63241]: DEBUG nova.compute.manager [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2350.452482] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2350.452739] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2350.452900] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2350.453098] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2350.453248] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2350.453394] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2350.453600] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2350.453772] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2350.453927] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2350.454100] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2350.454276] env[63241]: DEBUG nova.virt.hardware [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2350.455145] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf89762-3536-40a8-bf44-e112171aae76 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.463219] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cff3ff-c417-410a-828a-465b4fc5ba2d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.466932] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2350.675714] env[63241]: DEBUG nova.compute.manager [req-3e4cc679-0b87-4468-ac8a-d708e6ce889a req-0835c331-8acc-4ee4-a7c6-35c634bcd075 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Received event network-vif-plugged-790566e9-9e07-4ae3-ab15-d1ef783fe49e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2350.675981] env[63241]: DEBUG oslo_concurrency.lockutils [req-3e4cc679-0b87-4468-ac8a-d708e6ce889a req-0835c331-8acc-4ee4-a7c6-35c634bcd075 service nova] Acquiring lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2350.676497] env[63241]: DEBUG oslo_concurrency.lockutils [req-3e4cc679-0b87-4468-ac8a-d708e6ce889a req-0835c331-8acc-4ee4-a7c6-35c634bcd075 service nova] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2350.676682] env[63241]: DEBUG oslo_concurrency.lockutils [req-3e4cc679-0b87-4468-ac8a-d708e6ce889a req-0835c331-8acc-4ee4-a7c6-35c634bcd075 
service nova] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2350.676852] env[63241]: DEBUG nova.compute.manager [req-3e4cc679-0b87-4468-ac8a-d708e6ce889a req-0835c331-8acc-4ee4-a7c6-35c634bcd075 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] No waiting events found dispatching network-vif-plugged-790566e9-9e07-4ae3-ab15-d1ef783fe49e {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2350.677092] env[63241]: WARNING nova.compute.manager [req-3e4cc679-0b87-4468-ac8a-d708e6ce889a req-0835c331-8acc-4ee4-a7c6-35c634bcd075 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Received unexpected event network-vif-plugged-790566e9-9e07-4ae3-ab15-d1ef783fe49e for instance with vm_state building and task_state spawning. [ 2350.756143] env[63241]: DEBUG nova.network.neutron [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Successfully updated port: 790566e9-9e07-4ae3-ab15-d1ef783fe49e {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2350.970431] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Getting list of instances from cluster (obj){ [ 2350.970431] env[63241]: value = "domain-c8" [ 2350.970431] env[63241]: _type = "ClusterComputeResource" [ 2350.970431] env[63241]: } {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2350.971522] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ece80b-b45a-402a-93b4-342cf784f6a0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.979867] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Got total of 0 instances {{(pid=63241) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2350.980028] env[63241]: WARNING nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] While synchronizing instance power states, found 1 instances in the database and 0 instances on the hypervisor. 
[ 2350.980172] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Triggering sync for uuid 34a55ddd-ed2c-4f7a-8080-a0c9d771925d {{(pid=63241) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 2350.980495] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2351.258970] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2351.258970] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2351.259126] env[63241]: DEBUG nova.network.neutron [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2351.789350] env[63241]: DEBUG nova.network.neutron [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2351.906719] env[63241]: DEBUG nova.network.neutron [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2352.409621] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2352.410050] env[63241]: DEBUG nova.compute.manager [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Instance network_info: |[{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2352.410536] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:57:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '790566e9-9e07-4ae3-ab15-d1ef783fe49e', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2352.418457] env[63241]: DEBUG oslo.service.loopingcall [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2352.418625] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2352.418850] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d765e58e-8413-4cf5-b995-83d2ca556b06 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.439894] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2352.439894] env[63241]: value = "task-1821490" [ 2352.439894] env[63241]: _type = "Task" [ 2352.439894] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.447198] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821490, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.705609] env[63241]: DEBUG nova.compute.manager [req-a2229553-92f8-452a-a151-7d7f3208f760 req-da9fa31d-28f4-4860-aed6-33e747a1ea53 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Received event network-changed-790566e9-9e07-4ae3-ab15-d1ef783fe49e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2352.705837] env[63241]: DEBUG nova.compute.manager [req-a2229553-92f8-452a-a151-7d7f3208f760 req-da9fa31d-28f4-4860-aed6-33e747a1ea53 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Refreshing instance network info cache due to event network-changed-790566e9-9e07-4ae3-ab15-d1ef783fe49e. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2352.706077] env[63241]: DEBUG oslo_concurrency.lockutils [req-a2229553-92f8-452a-a151-7d7f3208f760 req-da9fa31d-28f4-4860-aed6-33e747a1ea53 service nova] Acquiring lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2352.706211] env[63241]: DEBUG oslo_concurrency.lockutils [req-a2229553-92f8-452a-a151-7d7f3208f760 req-da9fa31d-28f4-4860-aed6-33e747a1ea53 service nova] Acquired lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2352.706378] env[63241]: DEBUG nova.network.neutron [req-a2229553-92f8-452a-a151-7d7f3208f760 req-da9fa31d-28f4-4860-aed6-33e747a1ea53 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Refreshing network info cache for port 790566e9-9e07-4ae3-ab15-d1ef783fe49e {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2352.949253] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821490, 'name': CreateVM_Task, 'duration_secs': 0.316946} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2352.949638] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2352.950076] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2352.950248] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2352.950586] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2352.950835] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7f5ddac-eab2-4d07-becd-97f6767d87bf {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2352.955041] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2352.955041] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5208fcf0-3713-149e-2835-8fbda338e480" [ 2352.955041] env[63241]: _type = "Task" [ 2352.955041] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2352.962162] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2352.962385] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5208fcf0-3713-149e-2835-8fbda338e480, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.385933] env[63241]: DEBUG nova.network.neutron [req-a2229553-92f8-452a-a151-7d7f3208f760 req-da9fa31d-28f4-4860-aed6-33e747a1ea53 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updated VIF entry in instance network info cache for port 790566e9-9e07-4ae3-ab15-d1ef783fe49e. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2353.386378] env[63241]: DEBUG nova.network.neutron [req-a2229553-92f8-452a-a151-7d7f3208f760 req-da9fa31d-28f4-4860-aed6-33e747a1ea53 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2353.466677] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5208fcf0-3713-149e-2835-8fbda338e480, 'name': SearchDatastore_Task, 'duration_secs': 0.011494} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2353.466977] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2353.467225] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2353.467464] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2353.467618] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2353.467802] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2353.468066] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cb17400-bea8-4ea1-9a7d-363fc40393c3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.476386] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2353.476548] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2353.477214] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b516410b-3e33-4d95-9153-8fe0fa614953 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.481846] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2353.481846] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]527c371c-a2fe-498f-cdc4-617acf3e7f1b" [ 2353.481846] env[63241]: _type = "Task" [ 2353.481846] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2353.488841] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527c371c-a2fe-498f-cdc4-617acf3e7f1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.889492] env[63241]: DEBUG oslo_concurrency.lockutils [req-a2229553-92f8-452a-a151-7d7f3208f760 req-da9fa31d-28f4-4860-aed6-33e747a1ea53 service nova] Releasing lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2353.992137] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]527c371c-a2fe-498f-cdc4-617acf3e7f1b, 'name': SearchDatastore_Task, 'duration_secs': 0.008433} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2353.992919] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1605ade-bfff-41f3-adcb-da0e3eab791f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.997926] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2353.997926] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]526f8432-5fe3-68f2-9281-52845eedb853" [ 2353.997926] env[63241]: _type = "Task" [ 2353.997926] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2354.004957] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526f8432-5fe3-68f2-9281-52845eedb853, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.451583] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2354.451833] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2354.508667] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]526f8432-5fe3-68f2-9281-52845eedb853, 'name': SearchDatastore_Task, 'duration_secs': 0.009656} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2354.508916] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2354.509187] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 34a55ddd-ed2c-4f7a-8080-a0c9d771925d/34a55ddd-ed2c-4f7a-8080-a0c9d771925d.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2354.509433] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba6e2c42-dd56-4476-bea7-50c4a3f4bb91 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.515911] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2354.515911] env[63241]: value = "task-1821491" [ 2354.515911] env[63241]: _type = "Task" [ 2354.515911] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2354.523172] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821491, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.025886] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821491, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4029} completed successfully. 
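Interleaved with the disk copy above is the "Running periodic task ComputeManager._reclaim_queued_deletes" / "CONF.reclaim_instance_interval <= 0, skipping..." pair, which is the usual oslo.service periodic-task shape: the runner fires the task on schedule and the task bails out immediately when its feature is disabled by configuration. A hedged sketch of that shape follows; the decorator and base class are real oslo.service APIs, but the option registration and the task body are placeholders rather than Nova's code.

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
# Placeholder registration; the real reclaim_instance_interval option is registered by Nova.
CONF.register_opts([cfg.IntOpt("reclaim_instance_interval", default=0)])


class ComputeManagerSketch(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task
    def _reclaim_queued_deletes(self, context):
        """Reclaim SOFT_DELETED instances -- a no-op when the feature is off."""
        if CONF.reclaim_instance_interval <= 0:
            # Matches the "CONF.reclaim_instance_interval <= 0, skipping..." DEBUG record.
            return
        # ... the real task would look up soft-deleted instances older than the
        # configured interval and delete them; elided here ...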
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2355.026361] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] 34a55ddd-ed2c-4f7a-8080-a0c9d771925d/34a55ddd-ed2c-4f7a-8080-a0c9d771925d.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2355.026361] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2355.026626] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05a6953e-bfad-4287-bf9c-6f5d46079033 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.032981] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2355.032981] env[63241]: value = "task-1821492" [ 2355.032981] env[63241]: _type = "Task" [ 2355.032981] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2355.041733] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821492, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.451921] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.542748] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821492, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056992} completed successfully. 
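The "Extending root virtual disk to 1048576" record reads naturally once the unit is known: the vmwareapi layer sizes VMDKs in KiB (vSphere's capacityInKB), so a flavor with root_gb=1 becomes 1 * 1024 * 1024 = 1,048,576 KiB, which is exactly the value passed to ExtendVirtualDisk_Task above. A tiny sketch of that conversion; the helper name is illustrative, not Nova's.

def root_gb_to_kib(root_gb: int) -> int:
    """Convert a flavor's root_gb to the KiB figure handed to ExtendVirtualDisk_Task."""
    return root_gb * 1024 * 1024


assert root_gb_to_kib(1) == 1048576  # the value logged above for a 1 GB root disk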
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2355.543080] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2355.543924] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34471c30-bb53-4f33-9f9d-99763d4cc6b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.565348] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 34a55ddd-ed2c-4f7a-8080-a0c9d771925d/34a55ddd-ed2c-4f7a-8080-a0c9d771925d.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2355.565576] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d017dbb1-9a23-4a7a-a54c-bae689058067 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.584749] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2355.584749] env[63241]: value = "task-1821493" [ 2355.584749] env[63241]: _type = "Task" [ 2355.584749] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2355.591979] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821493, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.094581] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821493, 'name': ReconfigVM_Task, 'duration_secs': 0.260593} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2356.094954] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 34a55ddd-ed2c-4f7a-8080-a0c9d771925d/34a55ddd-ed2c-4f7a-8080-a0c9d771925d.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2356.095587] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3935821e-3112-4af9-aec5-65deb67d3fb5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.101989] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2356.101989] env[63241]: value = "task-1821494" [ 2356.101989] env[63241]: _type = "Task" [ 2356.101989] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2356.109200] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821494, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.611702] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821494, 'name': Rename_Task, 'duration_secs': 0.127162} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2356.612104] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2356.612410] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9521485-2d9a-453c-ac46-b42e7fb2411a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.619858] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2356.619858] env[63241]: value = "task-1821495" [ 2356.619858] env[63241]: _type = "Task" [ 2356.619858] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2356.628564] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821495, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2357.129884] env[63241]: DEBUG oslo_vmware.api [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821495, 'name': PowerOnVM_Task, 'duration_secs': 0.420574} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2357.130248] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2357.130413] env[63241]: INFO nova.compute.manager [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Took 6.70 seconds to spawn the instance on the hypervisor. [ 2357.130590] env[63241]: DEBUG nova.compute.manager [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2357.131391] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3c02b4-0f42-4f6f-9cd5-94d789689cbd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.653484] env[63241]: INFO nova.compute.manager [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Took 12.05 seconds to build instance. [ 2358.156466] env[63241]: DEBUG oslo_concurrency.lockutils [None req-257929cb-0010-4762-9a67-4fce2206ed96 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.560s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2358.156811] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.176s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2358.156930] env[63241]: INFO nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] During sync_power_state the instance has a pending task (spawning). Skip. 
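The lock bookkeeping at the end of the spawn -- "released ... held 13.560s" for the build lock and "acquired ... waited 7.176s" for the power-state sync -- is emitted by oslo.concurrency's lock wrapper, which times how long a caller blocked on a named lock and how long it then held it. A minimal sketch of that pattern, assuming only the public lockutils.synchronized decorator; the function body and the caller are placeholders.

from oslo_concurrency import lockutils


@lockutils.synchronized("34a55ddd-ed2c-4f7a-8080-a0c9d771925d")
def _locked_do_build_and_run_instance():
    """Placeholder body -- the decorator's wrapper is what logs the
    acquired/waited and released/held DEBUG lines with their timings."""


# A second caller contending for the same lock name simply blocks; the time it
# spends blocked is what shows up as the "waited 7.176s" figure above.
_locked_do_build_and_run_instance()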
[ 2358.157112] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2358.707816] env[63241]: DEBUG nova.compute.manager [req-b648a5fd-f81d-4851-bbbe-18c769afec49 req-9a282c0a-7fa4-45de-b722-a5b17c274de1 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Received event network-changed-790566e9-9e07-4ae3-ab15-d1ef783fe49e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2358.708045] env[63241]: DEBUG nova.compute.manager [req-b648a5fd-f81d-4851-bbbe-18c769afec49 req-9a282c0a-7fa4-45de-b722-a5b17c274de1 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Refreshing instance network info cache due to event network-changed-790566e9-9e07-4ae3-ab15-d1ef783fe49e. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2358.708242] env[63241]: DEBUG oslo_concurrency.lockutils [req-b648a5fd-f81d-4851-bbbe-18c769afec49 req-9a282c0a-7fa4-45de-b722-a5b17c274de1 service nova] Acquiring lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2358.708431] env[63241]: DEBUG oslo_concurrency.lockutils [req-b648a5fd-f81d-4851-bbbe-18c769afec49 req-9a282c0a-7fa4-45de-b722-a5b17c274de1 service nova] Acquired lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2358.708578] env[63241]: DEBUG nova.network.neutron [req-b648a5fd-f81d-4851-bbbe-18c769afec49 req-9a282c0a-7fa4-45de-b722-a5b17c274de1 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Refreshing network info cache for port 790566e9-9e07-4ae3-ab15-d1ef783fe49e {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2359.414899] env[63241]: DEBUG nova.network.neutron [req-b648a5fd-f81d-4851-bbbe-18c769afec49 req-9a282c0a-7fa4-45de-b722-a5b17c274de1 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updated VIF entry in instance network info cache for port 790566e9-9e07-4ae3-ab15-d1ef783fe49e. 
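The network-changed-790566e9-9e07-4ae3-ab15-d1ef783fe49e event above follows the standard cache-refresh shape: Neutron notifies Nova of a port change, and the compute manager re-reads the port and rewrites the instance_info_cache while holding a per-instance "refresh_cache-<uuid>" lock so concurrent refreshes serialize. A hedged sketch of that shape, assuming only lockutils.lock; fetch_network_info and the cache mapping stand in for the Neutron query and the database-backed info cache.

from oslo_concurrency import lockutils


def refresh_instance_nw_cache(instance_uuid, fetch_network_info, cache):
    """Refresh one instance's network-info cache under a per-instance lock."""
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        network_info = fetch_network_info(instance_uuid)  # e.g. the list of VIF dicts seen in the log
        cache[instance_uuid] = network_info               # "Updated VIF entry in instance network info cache ..."
        return network_info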
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2359.415313] env[63241]: DEBUG nova.network.neutron [req-b648a5fd-f81d-4851-bbbe-18c769afec49 req-9a282c0a-7fa4-45de-b722-a5b17c274de1 service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2359.451827] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.451988] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2359.452092] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2359.917787] env[63241]: DEBUG oslo_concurrency.lockutils [req-b648a5fd-f81d-4851-bbbe-18c769afec49 req-9a282c0a-7fa4-45de-b722-a5b17c274de1 service nova] Releasing lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2359.981919] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2359.982068] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2359.982227] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Forcefully refreshing 
network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2359.982445] env[63241]: DEBUG nova.objects.instance [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lazy-loading 'info_cache' on Instance uuid 34a55ddd-ed2c-4f7a-8080-a0c9d771925d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2361.689548] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2362.192258] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2362.192496] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2362.192687] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.452162] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.452646] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.452467] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running 
periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2370.451561] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2370.955268] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.955531] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.955700] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.955861] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2370.956763] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d9d8a1-cebb-40ae-84b3-383ef0551f54 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.965174] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caeed37f-4f7b-40c0-abf7-217c888b2aae {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.978770] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c20022-6e7d-488d-b757-716970c1f26c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.985229] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705735cc-a525-4f48-b5fd-55ac9e802358 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.013106] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180809MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2371.013247] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2371.013389] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2372.126407] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 34a55ddd-ed2c-4f7a-8080-a0c9d771925d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2372.126734] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2372.126804] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2372.156211] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b4c056-5223-4893-937e-828a63461304 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.163463] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa4fb6f-2299-4ac3-8d40-a10129a65e62 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.193676] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0393f390-8ba1-412b-a271-2193da02eb08 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.200503] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6f9570-ded3-4747-a183-573afa4364b0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.213265] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2372.761319] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 196 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2372.761593] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 196 to 197 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2372.761708] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2373.266934] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2373.267349] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.254s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2396.989465] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2396.989782] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2397.493444] env[63241]: DEBUG nova.compute.utils [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2397.997139] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2399.060072] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2399.060578] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2399.060578] env[63241]: INFO nova.compute.manager [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Attaching volume ed97d171-dfe5-4441-b476-b2f6f934074e to /dev/sdb [ 2399.091695] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e9c25a-093a-4977-9b55-c5295cb3c591 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.098868] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0634e129-00d8-4183-8dc5-12d1b87a5af0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.111708] env[63241]: DEBUG nova.virt.block_device [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating existing volume attachment record: c0572036-1f6c-4e12-81ab-0c98f077cde4 {{(pid=63241) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2403.653667] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Volume attach. 
Driver type: vmdk {{(pid=63241) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2403.653970] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377257', 'volume_id': 'ed97d171-dfe5-4441-b476-b2f6f934074e', 'name': 'volume-ed97d171-dfe5-4441-b476-b2f6f934074e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '34a55ddd-ed2c-4f7a-8080-a0c9d771925d', 'attached_at': '', 'detached_at': '', 'volume_id': 'ed97d171-dfe5-4441-b476-b2f6f934074e', 'serial': 'ed97d171-dfe5-4441-b476-b2f6f934074e'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2403.654840] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99544dd6-6c89-452b-a5a6-359960d017af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.671710] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c78e673-85f6-4c71-b9b9-f9c29b88996d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.695176] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-ed97d171-dfe5-4441-b476-b2f6f934074e/volume-ed97d171-dfe5-4441-b476-b2f6f934074e.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2403.695408] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-106a7f5b-1ec6-4a5c-bfd4-9ece23d2b5e2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.712655] env[63241]: DEBUG oslo_vmware.api [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2403.712655] env[63241]: value = "task-1821500" [ 2403.712655] env[63241]: _type = "Task" [ 2403.712655] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2403.719911] env[63241]: DEBUG oslo_vmware.api [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821500, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.222096] env[63241]: DEBUG oslo_vmware.api [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821500, 'name': ReconfigVM_Task, 'duration_secs': 0.331533} completed successfully. 
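The _attach_volume_vmdk record above spells out the connection_info that Cinder hands over for a vmdk-type volume. The sketch below only illustrates the dispatch-on-driver_volume_type shape and the fields read from that dict; the function and its return value are placeholders, and the subsequent ReconfigVM_Task calls (task-1821500 / task-1821501 above) are where the real driver actually adds the disk to the VM.

def attach_volume(connection_info):
    """Dispatch a volume attach on driver_volume_type; only 'vmdk' is sketched."""
    if connection_info["driver_volume_type"] != "vmdk":
        raise NotImplementedError(connection_info["driver_volume_type"])

    data = connection_info["data"]
    # Field names match the connection_info logged above; the values shown are examples.
    volume_ref = data["volume"]       # e.g. 'vm-377257', the vCenter reference for the volume backing
    vmdk_name = data["name"]          # 'volume-ed97d171-dfe5-4441-b476-b2f6f934074e'
    read_only = data["access_mode"] != "rw"
    return volume_ref, vmdk_name, read_only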
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.222379] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-ed97d171-dfe5-4441-b476-b2f6f934074e/volume-ed97d171-dfe5-4441-b476-b2f6f934074e.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2404.227031] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11ccbe5d-35c9-407a-a038-f56948c5e922 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.241925] env[63241]: DEBUG oslo_vmware.api [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2404.241925] env[63241]: value = "task-1821501" [ 2404.241925] env[63241]: _type = "Task" [ 2404.241925] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.250516] env[63241]: DEBUG oslo_vmware.api [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821501, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.752631] env[63241]: DEBUG oslo_vmware.api [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821501, 'name': ReconfigVM_Task, 'duration_secs': 0.127229} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.752926] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377257', 'volume_id': 'ed97d171-dfe5-4441-b476-b2f6f934074e', 'name': 'volume-ed97d171-dfe5-4441-b476-b2f6f934074e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '34a55ddd-ed2c-4f7a-8080-a0c9d771925d', 'attached_at': '', 'detached_at': '', 'volume_id': 'ed97d171-dfe5-4441-b476-b2f6f934074e', 'serial': 'ed97d171-dfe5-4441-b476-b2f6f934074e'} {{(pid=63241) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2405.788757] env[63241]: DEBUG nova.objects.instance [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'flavor' on Instance uuid 34a55ddd-ed2c-4f7a-8080-a0c9d771925d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2406.293791] env[63241]: DEBUG oslo_concurrency.lockutils [None req-3f53b324-b1cd-47ef-8910-c345b014781c tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.234s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2407.174213] env[63241]: DEBUG nova.compute.manager [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Stashing vm_state: active {{(pid=63241) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2407.696886] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2407.697197] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2408.202551] env[63241]: INFO nova.compute.claims [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2408.708497] env[63241]: INFO nova.compute.resource_tracker [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating 
resource usage from migration 3672a3d7-5728-41b6-85f1-4f59f2426932 [ 2408.744795] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c88def8-b4fe-4b1e-a5e5-de6a228c5da7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.752052] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695b480f-32e7-4b9e-a67a-55cb4eed3f0b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.782116] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25535957-23f5-4ed7-bde7-bcbb3100101a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.788835] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b16b9c-6b8a-4c3c-97be-66656a876813 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.801472] env[63241]: DEBUG nova.compute.provider_tree [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2409.304753] env[63241]: DEBUG nova.scheduler.client.report [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2409.810262] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.113s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2409.810518] env[63241]: INFO nova.compute.manager [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Migrating [ 2410.326058] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2410.326421] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2410.326467] env[63241]: DEBUG nova.network.neutron [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2411.229094] env[63241]: DEBUG nova.network.neutron [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2411.732437] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2413.250684] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f37687-5713-45ae-ae1c-61ab85f021f1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.287497] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance '34a55ddd-ed2c-4f7a-8080-a0c9d771925d' progress to 0 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2413.796061] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 
34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2413.796061] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abf66467-7dbf-4d5b-9d2f-4ffc87b6d0df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.803999] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2413.803999] env[63241]: value = "task-1821502" [ 2413.803999] env[63241]: _type = "Task" [ 2413.803999] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2413.811607] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821502, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2414.314605] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821502, 'name': PowerOffVM_Task, 'duration_secs': 0.172529} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2414.314997] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2414.315073] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance '34a55ddd-ed2c-4f7a-8080-a0c9d771925d' progress to 17 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2414.821302] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2414.821558] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2414.821720] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2414.821960] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2414.822132] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2414.822285] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2414.822505] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2414.822668] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2414.822836] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2414.823008] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2414.823193] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2414.829027] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03ada171-07da-4b55-b6ce-dc5c1e93b86a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.844030] env[63241]: DEBUG oslo_vmware.api 
[None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2414.844030] env[63241]: value = "task-1821503" [ 2414.844030] env[63241]: _type = "Task" [ 2414.844030] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.852168] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821503, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2415.262775] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2415.354138] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821503, 'name': ReconfigVM_Task, 'duration_secs': 0.162067} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2415.354537] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance '34a55ddd-ed2c-4f7a-8080-a0c9d771925d' progress to 33 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2415.861645] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2415.861924] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2415.862089] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2415.862289] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 
tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2415.862437] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2415.862586] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2415.862789] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2415.862953] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2415.863143] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2415.863311] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2415.863484] env[63241]: DEBUG nova.virt.hardware [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2415.868831] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2415.869159] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f858e046-40e0-4b08-b303-66983fbaf93f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.888971] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting 
for the task: (returnval){ [ 2415.888971] env[63241]: value = "task-1821504" [ 2415.888971] env[63241]: _type = "Task" [ 2415.888971] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2415.896636] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821504, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.398710] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821504, 'name': ReconfigVM_Task, 'duration_secs': 0.181677} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2416.399059] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2416.399682] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8f40ce-6076-49ae-9686-2588b2bc113a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.425248] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 34a55ddd-ed2c-4f7a-8080-a0c9d771925d/34a55ddd-ed2c-4f7a-8080-a0c9d771925d.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2416.425782] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6db3fb38-2480-48f3-9056-11861cd5e336 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.443050] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2416.443050] env[63241]: value = "task-1821505" [ 2416.443050] env[63241]: _type = "Task" [ 2416.443050] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2416.450359] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821505, 'name': ReconfigVM_Task} progress is 5%. 
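Note on the task handling visible here: each VMware operation (PowerOffVM_Task, ReconfigVM_Task, PowerOnVM_Task) is submitted to vCenter and then polled by oslo.vmware's wait_for_task until it finishes, which is what produces the repeating "progress is N%" lines followed by a "completed successfully" entry carrying duration_secs. The snippet below is a minimal, self-contained sketch of that poll loop for illustration only; it is not the oslo.vmware implementation, and get_task_info and TaskFailed are hypothetical stand-ins.

    import time

    class TaskFailed(Exception):
        """Hypothetical error for a task that ends in an error state."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it completes, mirroring the
        wait_for_task/_poll_task cycle in the log (a progress line on each
        poll, then the elapsed duration once the task succeeds).

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'success' or 'error'), .progress and .error.
        """
        started = time.monotonic()
        while True:
            info = get_task_info()
            if info.state == 'success':
                return time.monotonic() - started   # reported as duration_secs
            if info.state == 'error':
                raise TaskFailed(info.error)
            print("progress is %s%%" % info.progress)
            time.sleep(poll_interval)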
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2416.450708] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2416.450890] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2416.451042] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2416.953382] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821505, 'name': ReconfigVM_Task, 'duration_secs': 0.262665} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2416.953601] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 34a55ddd-ed2c-4f7a-8080-a0c9d771925d/34a55ddd-ed2c-4f7a-8080-a0c9d771925d.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2416.953922] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance '34a55ddd-ed2c-4f7a-8080-a0c9d771925d' progress to 50 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2417.460824] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feacdd60-b7c7-46f7-8774-b5219049afa1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.481789] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8770c0fc-cc36-4d15-ab1d-00c234ec6a22 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.500819] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance '34a55ddd-ed2c-4f7a-8080-a0c9d771925d' progress to 67 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2419.135971] env[63241]: DEBUG nova.network.neutron [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Port 790566e9-9e07-4ae3-ab15-d1ef783fe49e 
binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2420.160793] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2420.161201] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2420.161201] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2420.451958] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2420.452243] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2420.452243] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2420.955513] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2420.955747] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2420.955816] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2420.955922] env[63241]: DEBUG nova.objects.instance [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lazy-loading 'info_cache' on Instance uuid 34a55ddd-ed2c-4f7a-8080-a0c9d771925d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2421.198558] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2422.736214] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2423.239317] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2423.239526] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2423.239836] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2423.239961] env[63241]: DEBUG nova.network.neutron [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2423.241137] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2423.940480] env[63241]: DEBUG 
nova.network.neutron [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2424.443088] env[63241]: DEBUG oslo_concurrency.lockutils [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2424.451805] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2424.451947] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2424.952534] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd475ba-c680-4be7-9543-abc64f86eca9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2424.959686] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea775419-1f5e-44ca-9ea4-b8c7cf239116 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.065784] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55cab09b-803f-43a5-8d7d-94a44d3b9b43 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.087847] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3b037ae6-5960-4ae5-99bd-b414a7284991 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.094353] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance '34a55ddd-ed2c-4f7a-8080-a0c9d771925d' progress to 83 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2426.600734] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2426.601063] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0886b25e-e8fb-4457-92e4-c065bc951386 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.608093] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2426.608093] env[63241]: value = "task-1821506" [ 2426.608093] env[63241]: _type = "Task" [ 2426.608093] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2426.615497] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821506, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2427.117557] env[63241]: DEBUG oslo_vmware.api [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821506, 'name': PowerOnVM_Task, 'duration_secs': 0.372235} completed successfully. 
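The "Updating instance ... progress to N" entries in this section (0, 17, 33, 50, 67, 83 and, just below, 100) are coarse milestones persisted as the resize-revert walks through power-off, disk detach/reattach, port binding and power-on. The sketch below is a rough illustration of such a progress helper only; the Instance dataclass and its save() method are hypothetical stand-ins, not Nova's object model, and the step count is chosen just to approximate the percentages seen here.

    from dataclasses import dataclass

    @dataclass
    class Instance:
        uuid: str
        progress: int = 0

        def save(self):
            # Nova would persist this via its object layer; here we just echo
            # it the way the driver logs it.
            print("Updating instance %r progress to %d" % (self.uuid, self.progress))

    def update_instance_progress(instance, step, total_steps):
        """Store a coarse percentage for the current migration step."""
        instance.progress = int(step * 100 / total_steps)
        instance.save()

    inst = Instance(uuid="34a55ddd-ed2c-4f7a-8080-a0c9d771925d")
    for step in range(7):
        # prints 0, 16, 33, 50, 66, 83, 100 -- close to the 0/17/33/50/67/83/100 above
        update_instance_progress(inst, step, total_steps=6)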
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2427.117909] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2427.118028] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-a2eff6c3-696e-4d3b-9bf1-fdbb34d076c2 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance '34a55ddd-ed2c-4f7a-8080-a0c9d771925d' progress to 100 {{(pid=63241) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2428.447157] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2429.452243] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2429.947742] env[63241]: DEBUG nova.network.neutron [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Port 790566e9-9e07-4ae3-ab15-d1ef783fe49e binding to destination host cpu-1 is already ACTIVE {{(pid=63241) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2429.948009] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2429.948175] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2429.948337] env[63241]: DEBUG nova.network.neutron [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2430.452725] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2430.694245] env[63241]: DEBUG nova.network.neutron [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] 
[instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2430.955355] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2430.955628] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2430.955747] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2430.955905] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2430.956807] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed6565f-03de-40da-b4e1-5a1a5efc82a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.965429] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053a1ea3-dba1-47a1-914a-4a737d527c18 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.979375] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e057e0a-25a0-4901-bb59-c61e670af7d2 {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.985585] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fec4c0e-699e-4615-a5be-50999edcd5c2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.013165] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180941MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2431.013311] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2431.013495] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2431.197368] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2431.701333] env[63241]: DEBUG nova.compute.manager [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63241) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 2432.020202] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Applying migration context for instance 34a55ddd-ed2c-4f7a-8080-a0c9d771925d as it has an incoming, in-progress migration 3672a3d7-5728-41b6-85f1-4f59f2426932. Migration status is reverting {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2432.020714] env[63241]: INFO nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating resource usage from migration 3672a3d7-5728-41b6-85f1-4f59f2426932 [ 2432.038577] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Migration 3672a3d7-5728-41b6-85f1-4f59f2426932 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
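A quick arithmetic check of the resource-tracker audit around this point: the "Final resource view" usage reported below is simply the reserved memory from the provider inventory plus the two placement allocations listed in this audit (the in-progress migration holding the old m1.nano sizing and the instance holding m1.micro). This is only a check of the numbers in the log, not resource-tracker code.

    allocations = [
        {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},   # migration 3672a3d7... (m1.nano)
        {"DISK_GB": 1, "MEMORY_MB": 256, "VCPU": 1},   # instance 34a55ddd... (m1.micro)
    ]
    reserved_memory_mb = 512   # 'reserved' in the MEMORY_MB inventory reported below

    used_ram = reserved_memory_mb + sum(a["MEMORY_MB"] for a in allocations)
    used_disk = sum(a["DISK_GB"] for a in allocations)
    used_vcpus = sum(a["VCPU"] for a in allocations)

    # 960 2 2 -> matches used_ram=960MB, used_disk=2GB, used_vcpus=2 in the final view
    print(used_ram, used_disk, used_vcpus)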
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2432.038722] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance 34a55ddd-ed2c-4f7a-8080-a0c9d771925d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2432.038893] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2432.039063] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=960MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2432.074795] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59685c3d-f07a-436f-b70f-05056ad33ca1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.082190] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0bc7bc-8134-488f-b664-42a53b98b7bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.111579] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9d64e3-259c-4c45-891d-b64531d28852 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.118496] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9ac9ef-d026-43fb-bd50-292a8d3ae395 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2432.131784] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2432.634744] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2432.795959] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" 
{{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2433.139824] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2433.140071] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.127s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2433.140331] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.345s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2433.643910] env[63241]: DEBUG nova.objects.instance [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'migration_context' on Instance uuid 34a55ddd-ed2c-4f7a-8080-a0c9d771925d {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2434.191278] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675efdd3-3241-49d1-9c30-f47db74132e4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2434.199337] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0b1593-210f-49ec-8ff5-fac4bb0c99bc {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2434.229879] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2979b46-b876-4793-9b36-5718046eadeb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2434.237918] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f164c8-bbd8-4ee6-81e9-8f7e6d0c7e0b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2434.251239] env[63241]: DEBUG nova.compute.provider_tree [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2434.755044] env[63241]: DEBUG nova.scheduler.client.report [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2435.767828] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.627s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2437.301938] env[63241]: INFO nova.compute.manager [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Swapping old allocation on dict_keys(['9a5e30eb-ceae-4224-aa66-dcbfa98ce24b']) held by migration 3672a3d7-5728-41b6-85f1-4f59f2426932 for instance [ 2437.323963] env[63241]: DEBUG nova.scheduler.client.report [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Overwriting current allocation {'allocations': {'9a5e30eb-ceae-4224-aa66-dcbfa98ce24b': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 199}}, 'project_id': '39ea2ef9af4742768fc75e7a839b0416', 'user_id': 'ecf711bb36ca4235920b16674379d0d3', 'consumer_generation': 1} on consumer 34a55ddd-ed2c-4f7a-8080-a0c9d771925d {{(pid=63241) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 2437.400124] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2437.400333] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2437.400514] env[63241]: DEBUG nova.network.neutron [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2438.104440] env[63241]: DEBUG nova.network.neutron [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [{"id": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "address": "fa:16:3e:6d:57:09", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap790566e9-9e", "ovs_interfaceid": "790566e9-9e07-4ae3-ab15-d1ef783fe49e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2438.607601] env[63241]: DEBUG oslo_concurrency.lockutils [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-34a55ddd-ed2c-4f7a-8080-a0c9d771925d" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2438.609131] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f6817c-f8e1-4dc0-a1c8-6e7efff35250 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2438.617060] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af9cf58-f2f7-4385-a7a6-f906d8d0c23a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.697279] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2439.697624] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d335d115-110f-406e-8f76-a2224c889bf9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2439.705274] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2439.705274] env[63241]: value = "task-1821507" [ 2439.705274] env[63241]: _type = "Task" [ 2439.705274] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2439.712933] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821507, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2440.215526] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821507, 'name': PowerOffVM_Task, 'duration_secs': 0.231396} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2440.215798] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2440.216514] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2440.216731] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2440.216889] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2440.217084] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2440.217240] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2440.217397] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2440.217631] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2440.217794] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2440.217961] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2440.218141] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2440.218315] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2440.223398] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-420ee9c9-cfa6-4679-82c1-5e97ed245ec2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.238784] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2440.238784] env[63241]: value = "task-1821508" [ 2440.238784] env[63241]: _type = "Task" [ 2440.238784] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2440.246318] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821508, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2440.748913] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821508, 'name': ReconfigVM_Task, 'duration_secs': 0.138177} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2440.749665] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4af8996-3ea6-482d-9864-486a98a63dfa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.770998] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2440.771248] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2440.771408] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2440.771595] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2440.771743] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2440.771890] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2440.772135] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2440.772308] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2440.772476] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2440.772638] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2440.772806] env[63241]: DEBUG nova.virt.hardware [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2440.773569] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb5b8327-0a88-4e6b-bb04-1c8ff80fd511 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.778403] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2440.778403] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52876d07-b2d5-d4df-76f9-43a87b0cdd7b" [ 2440.778403] env[63241]: _type = "Task" [ 2440.778403] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2440.785450] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52876d07-b2d5-d4df-76f9-43a87b0cdd7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2441.288165] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52876d07-b2d5-d4df-76f9-43a87b0cdd7b, 'name': SearchDatastore_Task, 'duration_secs': 0.011474} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2441.293538] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2441.293805] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f307673-4dfb-48b4-a327-0022a9da9fef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.310923] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2441.310923] env[63241]: value = "task-1821509" [ 2441.310923] env[63241]: _type = "Task" [ 2441.310923] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2441.318376] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821509, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2441.821491] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821509, 'name': ReconfigVM_Task, 'duration_secs': 0.195514} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2441.821869] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2441.822551] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc93fbd1-0d03-4586-a10f-a194dafafecd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.846490] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 34a55ddd-ed2c-4f7a-8080-a0c9d771925d/34a55ddd-ed2c-4f7a-8080-a0c9d771925d.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2441.846738] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef7f622f-bfab-4841-9695-7e212d8f42c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.863563] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2441.863563] env[63241]: value = "task-1821510" [ 2441.863563] env[63241]: _type = "Task" [ 2441.863563] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2441.870686] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821510, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2442.373422] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821510, 'name': ReconfigVM_Task, 'duration_secs': 0.26974} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2442.373702] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 34a55ddd-ed2c-4f7a-8080-a0c9d771925d/34a55ddd-ed2c-4f7a-8080-a0c9d771925d.vmdk or device None with type thin {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2442.374601] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b2a5bc-83c9-43e4-8976-e3b696828b2a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.394890] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2771efa7-bb60-499e-af5f-edffc37862ea {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.416371] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b9f89c-37f6-47bf-902b-4323834d7a2e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.436549] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25c57c6-4e20-4d47-8054-7965f1dd9a0d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.443127] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2442.443346] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30423a8c-858b-4872-8f4e-288572a9c843 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.449451] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2442.449451] env[63241]: value = "task-1821511" [ 2442.449451] env[63241]: _type = "Task" [ 2442.449451] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2442.456705] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821511, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2442.959734] env[63241]: DEBUG oslo_vmware.api [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821511, 'name': PowerOnVM_Task, 'duration_secs': 0.342971} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2442.960147] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2444.002270] env[63241]: INFO nova.compute.manager [None req-c09f0098-8a82-44e0-a200-044fc8a65ab0 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance to original state: 'active' [ 2445.722219] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2445.722619] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2445.722703] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2445.722889] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2445.723075] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2445.725269] env[63241]: INFO nova.compute.manager [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Terminating instance [ 2445.727116] env[63241]: DEBUG nova.compute.manager [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2445.727343] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2445.727575] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2200b438-56a9-4fa1-a831-1f2346ca1a7a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.734194] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2445.734194] env[63241]: value = "task-1821512" [ 2445.734194] env[63241]: _type = "Task" [ 2445.734194] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2445.742609] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821512, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2446.244420] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821512, 'name': PowerOffVM_Task, 'duration_secs': 0.174255} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2446.244688] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2446.244881] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Volume detach. 
Driver type: vmdk {{(pid=63241) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2446.245088] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377257', 'volume_id': 'ed97d171-dfe5-4441-b476-b2f6f934074e', 'name': 'volume-ed97d171-dfe5-4441-b476-b2f6f934074e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '34a55ddd-ed2c-4f7a-8080-a0c9d771925d', 'attached_at': '2024-12-12T15:41:36.000000', 'detached_at': '', 'volume_id': 'ed97d171-dfe5-4441-b476-b2f6f934074e', 'serial': 'ed97d171-dfe5-4441-b476-b2f6f934074e'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2446.245817] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0009da-eac3-407e-9187-d5df26775482 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.267124] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6fe86f-a7fa-4d82-bbdf-cd91869610f6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.273384] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262a52ef-4d1e-40dd-aebb-5ca8bade66b7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.292714] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafd557f-ddf6-4d40-9cb3-a30351856856 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.306305] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] The volume has not been displaced from its original location: [datastore1] volume-ed97d171-dfe5-4441-b476-b2f6f934074e/volume-ed97d171-dfe5-4441-b476-b2f6f934074e.vmdk. No consolidation needed. 
{{(pid=63241) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2446.311521] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2446.311778] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1e7527a-3312-40e6-bac8-2f4b29db476e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.329164] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2446.329164] env[63241]: value = "task-1821513" [ 2446.329164] env[63241]: _type = "Task" [ 2446.329164] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2446.336271] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821513, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2446.839191] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821513, 'name': ReconfigVM_Task, 'duration_secs': 0.180644} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2446.839593] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=63241) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2446.844244] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3272bfd-6c64-4e48-996f-c041c6bc6ba6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.858507] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2446.858507] env[63241]: value = "task-1821514" [ 2446.858507] env[63241]: _type = "Task" [ 2446.858507] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2446.866143] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821514, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2447.368237] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821514, 'name': ReconfigVM_Task, 'duration_secs': 0.134651} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2447.368550] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-377257', 'volume_id': 'ed97d171-dfe5-4441-b476-b2f6f934074e', 'name': 'volume-ed97d171-dfe5-4441-b476-b2f6f934074e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '34a55ddd-ed2c-4f7a-8080-a0c9d771925d', 'attached_at': '2024-12-12T15:41:36.000000', 'detached_at': '', 'volume_id': 'ed97d171-dfe5-4441-b476-b2f6f934074e', 'serial': 'ed97d171-dfe5-4441-b476-b2f6f934074e'} {{(pid=63241) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2447.368833] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2447.369594] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb03b2c-93d8-4bc4-b977-9d67f8666425 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.376036] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2447.376225] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b22a62ea-eb1f-465f-9cdc-84b71e49b69a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.445846] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2447.446020] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2447.446251] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] Deleting the datastore file [datastore1] 34a55ddd-ed2c-4f7a-8080-a0c9d771925d {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2447.446377] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a012637-5fbd-4e62-b13b-e03f043817ff {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.455430] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2447.455430] env[63241]: value = "task-1821516" [ 2447.455430] env[63241]: _type = "Task" [ 2447.455430] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2447.464083] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2447.967544] env[63241]: DEBUG oslo_vmware.api [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821516, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15297} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2447.967944] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2447.967944] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2447.968131] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2447.968308] env[63241]: INFO nova.compute.manager [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Took 2.24 seconds to destroy the instance on the hypervisor. [ 2447.968550] env[63241]: DEBUG oslo.service.loopingcall [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2447.968741] env[63241]: DEBUG nova.compute.manager [-] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2447.968836] env[63241]: DEBUG nova.network.neutron [-] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2448.394386] env[63241]: DEBUG nova.compute.manager [req-5441bc40-cc7f-4700-9002-609471659335 req-ebe793da-964e-4efe-aeae-0990cfbf291b service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Received event network-vif-deleted-790566e9-9e07-4ae3-ab15-d1ef783fe49e {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2448.394603] env[63241]: INFO nova.compute.manager [req-5441bc40-cc7f-4700-9002-609471659335 req-ebe793da-964e-4efe-aeae-0990cfbf291b service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Neutron deleted interface 790566e9-9e07-4ae3-ab15-d1ef783fe49e; detaching it from the instance and deleting it from the info cache [ 2448.394924] env[63241]: DEBUG nova.network.neutron [req-5441bc40-cc7f-4700-9002-609471659335 req-ebe793da-964e-4efe-aeae-0990cfbf291b service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2448.868137] env[63241]: DEBUG nova.network.neutron [-] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2448.896953] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83d3b5ea-8a15-4647-8855-005d2ea44cb7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.906822] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48578779-6976-48d1-bbea-baaa3cb6c56b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.930043] env[63241]: DEBUG nova.compute.manager [req-5441bc40-cc7f-4700-9002-609471659335 req-ebe793da-964e-4efe-aeae-0990cfbf291b service nova] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Detach interface failed, port_id=790566e9-9e07-4ae3-ab15-d1ef783fe49e, reason: Instance 34a55ddd-ed2c-4f7a-8080-a0c9d771925d could not be found. {{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2449.371739] env[63241]: INFO nova.compute.manager [-] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Took 1.40 seconds to deallocate network for instance. [ 2449.914158] env[63241]: INFO nova.compute.manager [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: 34a55ddd-ed2c-4f7a-8080-a0c9d771925d] Took 0.54 seconds to detach 1 volumes for instance. 
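The PowerOffVM_Task, ReconfigVM_Task and DeleteDatastoreFile_Task entries above all follow the same invoke-then-poll pattern exposed by oslo.vmware: a vSphere call returns a task managed-object reference, and the API session polls it until vCenter reports completion, which is what produces the recurring "Task: {...} progress is N%" DEBUG lines. A minimal sketch of that pattern against the public oslo_vmware.api session is shown below; the endpoint, credentials, poll settings and the power_off helper are placeholders for illustration, not values or code taken from this deployment.

    from oslo_vmware import api

    # Placeholder connection details -- not the vCenter used in this log.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=3, task_poll_interval=0.5)

    def power_off(vm_ref):
        """Invoke PowerOffVM_Task and block until vCenter finishes it."""
        # The vSphere method call returns a task managed-object reference.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls the task until it succeeds or raises on error;
        # each poll corresponds to one "progress is N%" line in the log.
        session.wait_for_task(task)

The same two calls, with a different vSphere method name, underlie the disk detach/attach reconfigurations and the datastore file deletion recorded in this section.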
[ 2450.420842] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2450.421226] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2450.421341] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2450.443551] env[63241]: INFO nova.scheduler.client.report [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted allocations for instance 34a55ddd-ed2c-4f7a-8080-a0c9d771925d [ 2450.950712] env[63241]: DEBUG oslo_concurrency.lockutils [None req-8e527b3f-dad2-4802-90fb-2ee7db219ae1 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "34a55ddd-ed2c-4f7a-8080-a0c9d771925d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.228s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2451.711532] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2451.711882] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2452.214665] env[63241]: DEBUG nova.compute.manager [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Starting instance... 
{{(pid=63241) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2452.738074] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2452.738356] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2452.739835] env[63241]: INFO nova.compute.claims [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2453.775467] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ef8248-3747-46f1-94ea-7e98e2a5afa0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.783226] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0613eb-59a7-4d48-9c5c-bfab7bb5c104 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.812272] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8168bdcb-5936-4be0-84c0-79ca814910d3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.819576] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ccebcb-6b91-48eb-9618-75a9d6c40247 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.832017] env[63241]: DEBUG nova.compute.provider_tree [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2454.351163] env[63241]: ERROR nova.scheduler.client.report [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [req-3d94c7df-2652-4024-8977-3c6edeaf1687] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3d94c7df-2652-4024-8977-3c6edeaf1687"}]} [ 2454.366503] env[63241]: DEBUG nova.scheduler.client.report [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2454.379071] env[63241]: DEBUG nova.scheduler.client.report [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2454.379281] env[63241]: DEBUG nova.compute.provider_tree [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2454.388429] env[63241]: DEBUG nova.scheduler.client.report [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2454.404663] env[63241]: DEBUG nova.scheduler.client.report [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2454.425895] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3c2afb-f6c4-46f9-91fa-43596426558a {{(pid=63241) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.433218] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f25ae5a-f6c2-43f8-995d-a722fbb7f1df {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.464170] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c752df0e-1070-4376-b8f9-90281ac97b32 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.471250] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4ee418-658a-4fd3-abd7-214bd1ea57f2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.484282] env[63241]: DEBUG nova.compute.provider_tree [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2455.012734] env[63241]: DEBUG nova.scheduler.client.report [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 201 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2455.013068] env[63241]: DEBUG nova.compute.provider_tree [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 201 to 202 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2455.013269] env[63241]: DEBUG nova.compute.provider_tree [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2455.518370] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.780s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2455.518879] env[63241]: DEBUG nova.compute.manager [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Start building networks asynchronously for instance. {{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2456.024909] env[63241]: DEBUG nova.compute.utils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Using /dev/sd instead of None {{(pid=63241) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2456.026401] env[63241]: DEBUG nova.compute.manager [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Allocating IP information in the background. {{(pid=63241) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2456.026580] env[63241]: DEBUG nova.network.neutron [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] allocate_for_instance() {{(pid=63241) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2456.074834] env[63241]: DEBUG nova.policy [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ecf711bb36ca4235920b16674379d0d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39ea2ef9af4742768fc75e7a839b0416', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63241) authorize /opt/stack/nova/nova/policy.py:201}} [ 2456.338855] env[63241]: DEBUG nova.network.neutron [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Successfully created port: 47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2456.529812] env[63241]: DEBUG nova.compute.manager [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Start building block device mappings for instance. 
{{(pid=63241) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2457.539069] env[63241]: DEBUG nova.compute.manager [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Start spawning the instance on the hypervisor. {{(pid=63241) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2457.564270] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T15:07:37Z,direct_url=,disk_format='vmdk',id=e128f8d9-813d-4846-9a6e-b4c4717cd5b4,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='879e1def15b743fd96e9c706b3cdb82f',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T15:07:37Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2457.564529] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2457.564685] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2457.564882] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2457.565043] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2457.565193] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2457.565449] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2457.565574] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2457.565714] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2457.565878] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2457.566062] env[63241]: DEBUG nova.virt.hardware [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2457.566939] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a9437d-8cf0-4f0b-ae3f-451da79cdea0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.574847] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c75aaed-8e9b-4d47-982d-56fe0858ece3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.795913] env[63241]: DEBUG nova.compute.manager [req-56d5e73c-ad3a-4cfe-8582-35c099680794 req-7c549635-f7f6-4157-83b0-eee7edbf75f4 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received event network-vif-plugged-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2457.796118] env[63241]: DEBUG oslo_concurrency.lockutils [req-56d5e73c-ad3a-4cfe-8582-35c099680794 req-7c549635-f7f6-4157-83b0-eee7edbf75f4 service nova] Acquiring lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2457.796325] env[63241]: DEBUG oslo_concurrency.lockutils [req-56d5e73c-ad3a-4cfe-8582-35c099680794 req-7c549635-f7f6-4157-83b0-eee7edbf75f4 service nova] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2457.796491] env[63241]: DEBUG oslo_concurrency.lockutils [req-56d5e73c-ad3a-4cfe-8582-35c099680794 req-7c549635-f7f6-4157-83b0-eee7edbf75f4 service nova] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2457.796652] env[63241]: 
DEBUG nova.compute.manager [req-56d5e73c-ad3a-4cfe-8582-35c099680794 req-7c549635-f7f6-4157-83b0-eee7edbf75f4 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] No waiting events found dispatching network-vif-plugged-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2457.796817] env[63241]: WARNING nova.compute.manager [req-56d5e73c-ad3a-4cfe-8582-35c099680794 req-7c549635-f7f6-4157-83b0-eee7edbf75f4 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received unexpected event network-vif-plugged-47ef8579-84c9-45a0-bec4-9baf8c6792e7 for instance with vm_state building and task_state spawning. [ 2457.879110] env[63241]: DEBUG nova.network.neutron [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Successfully updated port: 47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2458.381386] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2458.381653] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2458.381707] env[63241]: DEBUG nova.network.neutron [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2458.912945] env[63241]: DEBUG nova.network.neutron [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Instance cache missing network info. 
{{(pid=63241) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2459.032183] env[63241]: DEBUG nova.network.neutron [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [{"id": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "address": "fa:16:3e:8d:8a:6f", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ef8579-84", "ovs_interfaceid": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2459.534627] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2459.534959] env[63241]: DEBUG nova.compute.manager [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Instance network_info: |[{"id": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "address": "fa:16:3e:8d:8a:6f", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ef8579-84", "ovs_interfaceid": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63241) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2459.535434] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:8a:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47ef8579-84c9-45a0-bec4-9baf8c6792e7', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2459.542942] env[63241]: DEBUG oslo.service.loopingcall [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2459.543159] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2459.543389] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ec9d28b-2ca6-4faf-a5d2-6605709056af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.563561] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2459.563561] env[63241]: value = "task-1821517" [ 2459.563561] env[63241]: _type = "Task" [ 2459.563561] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2459.570817] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821517, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2459.826219] env[63241]: DEBUG nova.compute.manager [req-d6318799-d5f8-4727-b71a-891a5fa36cb1 req-684e48b2-7446-4b9e-b5f9-349e9a0f6bbe service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received event network-changed-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2459.826482] env[63241]: DEBUG nova.compute.manager [req-d6318799-d5f8-4727-b71a-891a5fa36cb1 req-684e48b2-7446-4b9e-b5f9-349e9a0f6bbe service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Refreshing instance network info cache due to event network-changed-47ef8579-84c9-45a0-bec4-9baf8c6792e7. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2459.826794] env[63241]: DEBUG oslo_concurrency.lockutils [req-d6318799-d5f8-4727-b71a-891a5fa36cb1 req-684e48b2-7446-4b9e-b5f9-349e9a0f6bbe service nova] Acquiring lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2459.826980] env[63241]: DEBUG oslo_concurrency.lockutils [req-d6318799-d5f8-4727-b71a-891a5fa36cb1 req-684e48b2-7446-4b9e-b5f9-349e9a0f6bbe service nova] Acquired lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2459.827170] env[63241]: DEBUG nova.network.neutron [req-d6318799-d5f8-4727-b71a-891a5fa36cb1 req-684e48b2-7446-4b9e-b5f9-349e9a0f6bbe service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Refreshing network info cache for port 47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2460.073754] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821517, 'name': CreateVM_Task, 'duration_secs': 0.289693} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2460.074179] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2460.074642] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2460.074812] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2460.075155] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2460.075403] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb4575b7-af9a-4157-8e80-b1b92b81f9ef {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.080111] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2460.080111] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]5272b81f-420b-f07d-d8ef-674238799ef6" [ 2460.080111] env[63241]: _type = "Task" [ 2460.080111] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2460.087945] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5272b81f-420b-f07d-d8ef-674238799ef6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2460.505519] env[63241]: DEBUG nova.network.neutron [req-d6318799-d5f8-4727-b71a-891a5fa36cb1 req-684e48b2-7446-4b9e-b5f9-349e9a0f6bbe service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updated VIF entry in instance network info cache for port 47ef8579-84c9-45a0-bec4-9baf8c6792e7. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2460.505871] env[63241]: DEBUG nova.network.neutron [req-d6318799-d5f8-4727-b71a-891a5fa36cb1 req-684e48b2-7446-4b9e-b5f9-349e9a0f6bbe service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [{"id": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "address": "fa:16:3e:8d:8a:6f", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ef8579-84", "ovs_interfaceid": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2460.590921] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]5272b81f-420b-f07d-d8ef-674238799ef6, 'name': SearchDatastore_Task, 'duration_secs': 0.010751} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2460.591217] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2460.591442] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Processing image e128f8d9-813d-4846-9a6e-b4c4717cd5b4 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2460.591730] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2460.591881] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2460.592105] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2460.592400] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59c75521-d2f3-4c02-9dbe-becede9b5a9b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.600482] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2460.600633] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2460.601302] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69461bf5-4cfc-45bf-9811-3a08e6983f02 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.605919] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2460.605919] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ccc6fe-bb1a-3dc1-df46-ba55270f172e" [ 2460.605919] env[63241]: _type = "Task" [ 2460.605919] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2460.612900] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ccc6fe-bb1a-3dc1-df46-ba55270f172e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.008521] env[63241]: DEBUG oslo_concurrency.lockutils [req-d6318799-d5f8-4727-b71a-891a5fa36cb1 req-684e48b2-7446-4b9e-b5f9-349e9a0f6bbe service nova] Releasing lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2461.116321] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52ccc6fe-bb1a-3dc1-df46-ba55270f172e, 'name': SearchDatastore_Task, 'duration_secs': 0.007782} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2461.117084] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cd9ff36-1acd-450b-b408-6c1c0e93d88d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.122238] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2461.122238] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c013c0-dcb1-29d2-9d9e-88c7c4127512" [ 2461.122238] env[63241]: _type = "Task" [ 2461.122238] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.129671] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c013c0-dcb1-29d2-9d9e-88c7c4127512, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.632775] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c013c0-dcb1-29d2-9d9e-88c7c4127512, 'name': SearchDatastore_Task, 'duration_secs': 0.009578} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2461.633041] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2461.633292] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7/ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2461.633548] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a823cb1e-8fbe-4eb8-bbaf-54becbaf5512 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.639945] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2461.639945] env[63241]: value = "task-1821518" [ 2461.639945] env[63241]: _type = "Task" [ 2461.639945] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.647223] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.149945] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821518, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.395615} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.150277] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e128f8d9-813d-4846-9a6e-b4c4717cd5b4/e128f8d9-813d-4846-9a6e-b4c4717cd5b4.vmdk to [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7/ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2462.150438] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Extending root virtual disk to 1048576 {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2462.150681] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1f76948-5b23-4f0e-acb5-561e67575478 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.157295] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2462.157295] env[63241]: value = "task-1821519" [ 2462.157295] env[63241]: _type = "Task" [ 2462.157295] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.164345] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821519, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.667572] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090077} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.667830] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Extended root virtual disk {{(pid=63241) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2462.668710] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df34b800-5a08-48ad-a546-0d9768b58430 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.690589] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7/ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2462.690872] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fba13c22-9f28-4809-b634-876fb4bcd900 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.710015] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2462.710015] env[63241]: value = "task-1821520" [ 2462.710015] env[63241]: _type = "Task" [ 2462.710015] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.721273] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821520, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2463.220149] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821520, 'name': ReconfigVM_Task, 'duration_secs': 0.274452} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2463.220481] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Reconfigured VM instance instance-0000007d to attach disk [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7/ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7.vmdk or device None with type sparse {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2463.221119] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b7ea6cc-e940-440a-bf64-7a66c3350a80 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.227797] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2463.227797] env[63241]: value = "task-1821521" [ 2463.227797] env[63241]: _type = "Task" [ 2463.227797] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2463.235365] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821521, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2463.737982] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821521, 'name': Rename_Task, 'duration_secs': 0.147786} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2463.738284] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2463.738566] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea13280e-f0ad-4515-9c7c-a2fb8ed278c9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.744504] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2463.744504] env[63241]: value = "task-1821522" [ 2463.744504] env[63241]: _type = "Task" [ 2463.744504] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2463.753041] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821522, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2464.254224] env[63241]: DEBUG oslo_vmware.api [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821522, 'name': PowerOnVM_Task, 'duration_secs': 0.409902} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2464.254540] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2464.254754] env[63241]: INFO nova.compute.manager [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Took 6.72 seconds to spawn the instance on the hypervisor. [ 2464.254935] env[63241]: DEBUG nova.compute.manager [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2464.255750] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9255f1d6-a87b-49d6-8b4c-ab9d66409de4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2464.775077] env[63241]: INFO nova.compute.manager [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Took 12.06 seconds to build instance. [ 2465.277380] env[63241]: DEBUG oslo_concurrency.lockutils [None req-f4a23565-bf33-484b-b132-26ef0ad4be03 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.565s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2465.852155] env[63241]: DEBUG nova.compute.manager [req-50e24139-2245-450f-8d87-4480f14be1f6 req-2db84cce-fa5d-4c2e-b6aa-12ae72bc251b service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received event network-changed-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2465.852505] env[63241]: DEBUG nova.compute.manager [req-50e24139-2245-450f-8d87-4480f14be1f6 req-2db84cce-fa5d-4c2e-b6aa-12ae72bc251b service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Refreshing instance network info cache due to event network-changed-47ef8579-84c9-45a0-bec4-9baf8c6792e7. 
{{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2465.852852] env[63241]: DEBUG oslo_concurrency.lockutils [req-50e24139-2245-450f-8d87-4480f14be1f6 req-2db84cce-fa5d-4c2e-b6aa-12ae72bc251b service nova] Acquiring lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2465.853204] env[63241]: DEBUG oslo_concurrency.lockutils [req-50e24139-2245-450f-8d87-4480f14be1f6 req-2db84cce-fa5d-4c2e-b6aa-12ae72bc251b service nova] Acquired lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2465.853440] env[63241]: DEBUG nova.network.neutron [req-50e24139-2245-450f-8d87-4480f14be1f6 req-2db84cce-fa5d-4c2e-b6aa-12ae72bc251b service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Refreshing network info cache for port 47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2466.559975] env[63241]: DEBUG nova.network.neutron [req-50e24139-2245-450f-8d87-4480f14be1f6 req-2db84cce-fa5d-4c2e-b6aa-12ae72bc251b service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updated VIF entry in instance network info cache for port 47ef8579-84c9-45a0-bec4-9baf8c6792e7. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2466.560384] env[63241]: DEBUG nova.network.neutron [req-50e24139-2245-450f-8d87-4480f14be1f6 req-2db84cce-fa5d-4c2e-b6aa-12ae72bc251b service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [{"id": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "address": "fa:16:3e:8d:8a:6f", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ef8579-84", "ovs_interfaceid": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2467.063521] env[63241]: DEBUG oslo_concurrency.lockutils [req-50e24139-2245-450f-8d87-4480f14be1f6 req-2db84cce-fa5d-4c2e-b6aa-12ae72bc251b service nova] Releasing lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2477.135780] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2477.451999] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2477.451999] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2478.452805] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2482.453049] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2482.453049] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2482.453049] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2482.983777] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2482.983930] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquired lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2482.984091] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Forcefully refreshing network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2482.984274] env[63241]: DEBUG nova.objects.instance [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lazy-loading 'info_cache' on Instance uuid ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2484.690905] env[63241]: DEBUG nova.network.neutron [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [{"id": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "address": "fa:16:3e:8d:8a:6f", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ef8579-84", "ovs_interfaceid": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2485.193462] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Releasing lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2485.193698] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updated the network info_cache for instance {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2485.193876] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2485.194056] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2485.451337] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2489.451058] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2491.451851] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2491.955747] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2491.955960] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2491.956146] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2491.956307] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2491.957297] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e42f7c-3d86-4dd1-b80c-377913cb6f7f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2491.965938] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99f1cfb-0d42-4360-b1f4-8b41703366d7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2491.980234] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e32a2be-5e4c-46c2-9eb6-5e8661d36546 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2491.986393] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e772b8af-a581-438e-800e-0173a68549b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2492.015162] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181326MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2492.015321] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2492.015526] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2493.042224] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Instance ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63241) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2493.042542] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2493.042608] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2493.066706] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db0731e-e0ee-4d17-8332-e72ff3049cda {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2493.074243] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc5df28-91d3-456e-ad3c-9ee12322defa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2493.103833] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a376ca34-35ff-418f-b766-8f928ef6bb6a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2493.110579] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631ded1e-762f-487c-8929-4a9de012399c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2493.123404] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2493.654391] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 202 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2493.654620] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 202 to 203 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} 
[ 2493.654769] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2494.159224] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2494.159671] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.144s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2502.451054] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2502.451451] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2502.451451] env[63241]: INFO nova.compute.manager [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Shelving [ 2502.958324] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2502.958582] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75137971-f2af-40f7-bf49-18ee5d3519fd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2502.967891] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2502.967891] env[63241]: value = "task-1821523" [ 2502.967891] env[63241]: _type = "Task" [ 2502.967891] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2502.976028] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2503.478482] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821523, 'name': PowerOffVM_Task, 'duration_secs': 0.160004} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2503.478873] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2503.479508] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b507e1-ad93-420d-ad4f-a9a30dfad3ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2503.497841] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff3eed8-66b0-4426-9fd5-d4f8b76b4180 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.007930] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Creating Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2504.008258] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-154ea23d-c9f7-4766-a1ee-5d2317ba792d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.016365] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2504.016365] env[63241]: value = "task-1821524" [ 2504.016365] env[63241]: _type = "Task" [ 2504.016365] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2504.024226] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821524, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2504.527122] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821524, 'name': CreateSnapshot_Task, 'duration_secs': 0.42071} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2504.527505] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Created Snapshot of the VM instance {{(pid=63241) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2504.527906] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4a4a0f-7ac6-464d-af6a-8d8246d68743 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.044909] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Creating linked-clone VM from snapshot {{(pid=63241) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2505.045198] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ba0e5319-a49f-4206-a671-42cc36367e07 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.053220] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2505.053220] env[63241]: value = "task-1821525" [ 2505.053220] env[63241]: _type = "Task" [ 2505.053220] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2505.061012] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821525, 'name': CloneVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2505.563570] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821525, 'name': CloneVM_Task} progress is 94%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2506.063780] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821525, 'name': CloneVM_Task, 'duration_secs': 0.938112} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2506.064053] env[63241]: INFO nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Created linked-clone VM from snapshot [ 2506.064792] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbe33df-ec6f-4836-9be3-e922a1b921c5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.071657] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Uploading image d0f99cfb-21b1-4f33-adae-dc976f0ca7f7 {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2506.091129] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2506.091129] env[63241]: value = "vm-377260" [ 2506.091129] env[63241]: _type = "VirtualMachine" [ 2506.091129] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2506.091365] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cde68ff4-c142-4052-ad33-6f4315a54962 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.098390] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease: (returnval){ [ 2506.098390] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529c1dab-b1cc-7913-fa68-5d7cbe8607be" [ 2506.098390] env[63241]: _type = "HttpNfcLease" [ 2506.098390] env[63241]: } obtained for exporting VM: (result){ [ 2506.098390] env[63241]: value = "vm-377260" [ 2506.098390] env[63241]: _type = "VirtualMachine" [ 2506.098390] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2506.098794] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the lease: (returnval){ [ 2506.098794] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529c1dab-b1cc-7913-fa68-5d7cbe8607be" [ 2506.098794] env[63241]: _type = "HttpNfcLease" [ 2506.098794] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2506.104642] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2506.104642] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529c1dab-b1cc-7913-fa68-5d7cbe8607be" [ 2506.104642] env[63241]: _type = "HttpNfcLease" [ 2506.104642] env[63241]: } is initializing. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2506.606327] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2506.606327] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529c1dab-b1cc-7913-fa68-5d7cbe8607be" [ 2506.606327] env[63241]: _type = "HttpNfcLease" [ 2506.606327] env[63241]: } is ready. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2506.606812] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2506.606812] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529c1dab-b1cc-7913-fa68-5d7cbe8607be" [ 2506.606812] env[63241]: _type = "HttpNfcLease" [ 2506.606812] env[63241]: }. {{(pid=63241) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2506.607360] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd198e1-4594-4b26-bc2c-a6b43aa91370 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.616088] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52122f63-caef-7a63-4729-e492cb35226e/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2506.616266] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52122f63-caef-7a63-4729-e492cb35226e/disk-0.vmdk for reading. {{(pid=63241) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2506.703236] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0c4d73b0-4eda-4096-ab74-d0a7f46870a9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.051535] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52122f63-caef-7a63-4729-e492cb35226e/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2513.052566] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55da116c-8624-46df-9f3f-52743011581f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.058842] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52122f63-caef-7a63-4729-e492cb35226e/disk-0.vmdk is in state: ready. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2513.059011] env[63241]: ERROR oslo_vmware.rw_handles [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52122f63-caef-7a63-4729-e492cb35226e/disk-0.vmdk due to incomplete transfer. [ 2513.059234] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-32e6390d-2746-49b0-a09f-444b228f04b9 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.066313] env[63241]: DEBUG oslo_vmware.rw_handles [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52122f63-caef-7a63-4729-e492cb35226e/disk-0.vmdk. {{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2513.066501] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Uploaded image d0f99cfb-21b1-4f33-adae-dc976f0ca7f7 to the Glance image server {{(pid=63241) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2513.068655] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Destroying the VM {{(pid=63241) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2513.068894] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5e2bf4c8-d8a2-43f1-8f11-1fcb6a708c88 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.073917] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2513.073917] env[63241]: value = "task-1821527" [ 2513.073917] env[63241]: _type = "Task" [ 2513.073917] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2513.082020] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821527, 'name': Destroy_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2513.583585] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821527, 'name': Destroy_Task, 'duration_secs': 0.30065} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2513.583857] env[63241]: INFO nova.virt.vmwareapi.vm_util [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Destroyed the VM [ 2513.584111] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Deleting Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2513.584359] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-eb426a74-7f9b-4e86-be81-5832e5e2b68d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.590858] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2513.590858] env[63241]: value = "task-1821528" [ 2513.590858] env[63241]: _type = "Task" [ 2513.590858] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2513.598348] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821528, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2514.101342] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821528, 'name': RemoveSnapshot_Task, 'duration_secs': 0.36744} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2514.101741] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Deleted Snapshot of the VM instance {{(pid=63241) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2514.101887] env[63241]: DEBUG nova.compute.manager [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2514.102775] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225da30a-a712-43de-8e39-c9a8e9b7157b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2514.616056] env[63241]: INFO nova.compute.manager [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Shelve offloading [ 2514.617837] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2514.618167] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac183c72-55f2-4059-9966-e2134c07ab0c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2514.625902] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2514.625902] env[63241]: value = "task-1821529" [ 2514.625902] env[63241]: _type = "Task" [ 2514.625902] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2514.633242] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821529, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2515.135894] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] VM already powered off {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2515.136287] env[63241]: DEBUG nova.compute.manager [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2515.136848] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858ccbc1-4490-440b-8a75-c201c269bc6e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2515.142203] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2515.142372] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2515.142556] env[63241]: DEBUG nova.network.neutron [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2515.846216] env[63241]: DEBUG nova.network.neutron [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [{"id": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "address": "fa:16:3e:8d:8a:6f", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap47ef8579-84", "ovs_interfaceid": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2516.348925] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2516.556234] env[63241]: DEBUG nova.compute.manager [req-fe150736-283e-499c-bda4-02f8b1e0299c req-0b608511-d7db-423e-9265-0426fba3306d service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received event network-vif-unplugged-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2516.556444] env[63241]: DEBUG oslo_concurrency.lockutils [req-fe150736-283e-499c-bda4-02f8b1e0299c req-0b608511-d7db-423e-9265-0426fba3306d service nova] Acquiring lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2516.556654] env[63241]: DEBUG oslo_concurrency.lockutils [req-fe150736-283e-499c-bda4-02f8b1e0299c req-0b608511-d7db-423e-9265-0426fba3306d service nova] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2516.556823] env[63241]: DEBUG oslo_concurrency.lockutils [req-fe150736-283e-499c-bda4-02f8b1e0299c req-0b608511-d7db-423e-9265-0426fba3306d service nova] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2516.556989] env[63241]: DEBUG nova.compute.manager [req-fe150736-283e-499c-bda4-02f8b1e0299c req-0b608511-d7db-423e-9265-0426fba3306d service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] No waiting events found dispatching network-vif-unplugged-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2516.557158] env[63241]: WARNING nova.compute.manager [req-fe150736-283e-499c-bda4-02f8b1e0299c req-0b608511-d7db-423e-9265-0426fba3306d service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received unexpected event network-vif-unplugged-47ef8579-84c9-45a0-bec4-9baf8c6792e7 for instance with vm_state shelved and task_state shelving_offloading. 
[ 2516.687287] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2516.688160] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f5e054-9909-48a7-9efe-0976e700565c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2516.695832] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2516.696081] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aee80edd-bd88-4a24-88c8-347d871fa0f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2516.762466] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2516.762714] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2516.762835] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleting the datastore file [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2516.763133] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a2d3b733-8924-4863-9b19-82aa49179a49 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2516.769188] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2516.769188] env[63241]: value = "task-1821531" [ 2516.769188] env[63241]: _type = "Task" [ 2516.769188] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2516.777648] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821531, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2517.280054] env[63241]: DEBUG oslo_vmware.api [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821531, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126787} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2517.280365] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2517.280593] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2517.280812] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2517.305120] env[63241]: INFO nova.scheduler.client.report [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted allocations for instance ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 [ 2517.810303] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2517.810576] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2517.810861] env[63241]: DEBUG nova.objects.instance [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'resources' on Instance uuid ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2518.315407] env[63241]: DEBUG nova.objects.instance [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'numa_topology' on Instance uuid ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2518.586752] env[63241]: DEBUG nova.compute.manager [req-9ea803b7-2213-4b49-aebf-d0603a477caa req-19f5aff2-aa5d-408f-99e9-0e7053478867 
service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received event network-changed-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2518.586965] env[63241]: DEBUG nova.compute.manager [req-9ea803b7-2213-4b49-aebf-d0603a477caa req-19f5aff2-aa5d-408f-99e9-0e7053478867 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Refreshing instance network info cache due to event network-changed-47ef8579-84c9-45a0-bec4-9baf8c6792e7. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2518.587283] env[63241]: DEBUG oslo_concurrency.lockutils [req-9ea803b7-2213-4b49-aebf-d0603a477caa req-19f5aff2-aa5d-408f-99e9-0e7053478867 service nova] Acquiring lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2518.587444] env[63241]: DEBUG oslo_concurrency.lockutils [req-9ea803b7-2213-4b49-aebf-d0603a477caa req-19f5aff2-aa5d-408f-99e9-0e7053478867 service nova] Acquired lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2518.587609] env[63241]: DEBUG nova.network.neutron [req-9ea803b7-2213-4b49-aebf-d0603a477caa req-19f5aff2-aa5d-408f-99e9-0e7053478867 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Refreshing network info cache for port 47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2518.818589] env[63241]: DEBUG nova.objects.base [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63241) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2518.846951] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2de2193-3ad8-4004-89e3-7105995e97d5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2518.854235] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fe840e-cd14-4cc5-8ffc-6b8989c2db73 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2518.884577] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4d1a9c-5aec-4bca-bb01-d5a11df2a511 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2518.891776] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba48dcd0-4693-45a0-b482-becf6eb208c4 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2518.904552] env[63241]: DEBUG nova.compute.provider_tree [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2519.120206] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 
tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2519.356144] env[63241]: DEBUG nova.network.neutron [req-9ea803b7-2213-4b49-aebf-d0603a477caa req-19f5aff2-aa5d-408f-99e9-0e7053478867 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updated VIF entry in instance network info cache for port 47ef8579-84c9-45a0-bec4-9baf8c6792e7. {{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2519.356504] env[63241]: DEBUG nova.network.neutron [req-9ea803b7-2213-4b49-aebf-d0603a477caa req-19f5aff2-aa5d-408f-99e9-0e7053478867 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [{"id": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "address": "fa:16:3e:8d:8a:6f", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": null, "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap47ef8579-84", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2519.407501] env[63241]: DEBUG nova.scheduler.client.report [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2519.859366] env[63241]: DEBUG oslo_concurrency.lockutils [req-9ea803b7-2213-4b49-aebf-d0603a477caa req-19f5aff2-aa5d-408f-99e9-0e7053478867 service nova] Releasing lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2519.912371] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.102s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2520.422027] env[63241]: DEBUG oslo_concurrency.lockutils [None req-1ab6c80e-f02d-46a9-b1f8-735ba87f4e20 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 17.970s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2520.422602] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.302s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2520.422965] env[63241]: INFO nova.compute.manager [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Unshelving [ 2521.443694] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2521.443971] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2521.444196] env[63241]: DEBUG nova.objects.instance [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'pci_requests' on Instance uuid ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2521.947777] env[63241]: DEBUG nova.objects.instance [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'numa_topology' on Instance uuid ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2522.450675] env[63241]: INFO nova.compute.claims [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2523.486180] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca704db-7138-46f8-8eed-7e5970dfbd16 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2523.493901] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-aca1b2dc-44fa-4e91-91db-5202a800181f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2523.524768] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ca2f46-e397-4bdf-8016-352c734452b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2523.532023] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2e1d9e-4592-4d59-8b96-b2f533c3b129 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2523.545169] env[63241]: DEBUG nova.compute.provider_tree [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2524.048951] env[63241]: DEBUG nova.scheduler.client.report [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2524.553516] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.109s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2524.584104] env[63241]: INFO nova.network.neutron [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating port 47ef8579-84c9-45a0-bec4-9baf8c6792e7 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2526.063900] env[63241]: DEBUG nova.compute.manager [req-38a1298f-92e1-470a-9a23-c4e81acabd51 req-2a9bc447-c111-4ccc-bdbe-65cfe91587a9 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received event network-vif-plugged-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2526.064135] env[63241]: DEBUG oslo_concurrency.lockutils [req-38a1298f-92e1-470a-9a23-c4e81acabd51 req-2a9bc447-c111-4ccc-bdbe-65cfe91587a9 service nova] Acquiring lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2526.064406] env[63241]: DEBUG oslo_concurrency.lockutils [req-38a1298f-92e1-470a-9a23-c4e81acabd51 
req-2a9bc447-c111-4ccc-bdbe-65cfe91587a9 service nova] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2526.064505] env[63241]: DEBUG oslo_concurrency.lockutils [req-38a1298f-92e1-470a-9a23-c4e81acabd51 req-2a9bc447-c111-4ccc-bdbe-65cfe91587a9 service nova] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2526.064708] env[63241]: DEBUG nova.compute.manager [req-38a1298f-92e1-470a-9a23-c4e81acabd51 req-2a9bc447-c111-4ccc-bdbe-65cfe91587a9 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] No waiting events found dispatching network-vif-plugged-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2526.064885] env[63241]: WARNING nova.compute.manager [req-38a1298f-92e1-470a-9a23-c4e81acabd51 req-2a9bc447-c111-4ccc-bdbe-65cfe91587a9 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received unexpected event network-vif-plugged-47ef8579-84c9-45a0-bec4-9baf8c6792e7 for instance with vm_state shelved_offloaded and task_state spawning. [ 2526.147267] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2526.147442] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2526.147612] env[63241]: DEBUG nova.network.neutron [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Building network info cache for instance {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2526.847577] env[63241]: DEBUG nova.network.neutron [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [{"id": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "address": "fa:16:3e:8d:8a:6f", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ef8579-84", "ovs_interfaceid": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2527.350732] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2527.394555] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T15:07:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='79e95f2e61434754e7fddc88b6e0c5ae',container_format='bare',created_at=2024-12-12T15:42:39Z,direct_url=,disk_format='vmdk',id=d0f99cfb-21b1-4f33-adae-dc976f0ca7f7,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1069968707-shelved',owner='39ea2ef9af4742768fc75e7a839b0416',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2024-12-12T15:42:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2527.394823] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2527.394984] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image limits 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2527.395185] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Flavor pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2527.395341] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Image pref 0:0:0 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2527.395538] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 
tempest-ServerActionsTestOtherB-1384230803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63241) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2527.395768] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2527.395933] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2527.396114] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Got 1 possible topologies {{(pid=63241) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2527.396283] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2527.396454] env[63241]: DEBUG nova.virt.hardware [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63241) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2527.397393] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d4e177-63ba-435b-967c-bdbd60858c6b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2527.406991] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5c5a07-f4ed-402f-8ebd-d2d8610b98b2 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2527.420240] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:8a:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47ef8579-84c9-45a0-bec4-9baf8c6792e7', 'vif_model': 'vmxnet3'}] {{(pid=63241) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2527.427870] env[63241]: DEBUG oslo.service.loopingcall [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2527.428132] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Creating VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2527.428344] env[63241]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-108707b5-b5b2-4b13-9446-f42fcad2025a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2527.447377] env[63241]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2527.447377] env[63241]: value = "task-1821532" [ 2527.447377] env[63241]: _type = "Task" [ 2527.447377] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2527.454463] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821532, 'name': CreateVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2527.956639] env[63241]: DEBUG oslo_vmware.api [-] Task: {'id': task-1821532, 'name': CreateVM_Task, 'duration_secs': 0.501211} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2527.956761] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Created VM on the ESX host {{(pid=63241) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2527.957448] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2527.957622] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2527.958011] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2527.958267] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2e7f266-a730-4097-a02a-60430f7b0232 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2527.962349] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2527.962349] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52c6a963-683f-740b-ba3a-1d9ccbb6ad3d" [ 2527.962349] env[63241]: _type = "Task" [ 2527.962349] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2527.969496] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]52c6a963-683f-740b-ba3a-1d9ccbb6ad3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2528.088653] env[63241]: DEBUG nova.compute.manager [req-a31ad256-f1f1-4896-8b98-9beaa58a2ecd req-53296a82-7736-4337-89e0-bb9c891e0658 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received event network-changed-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2528.088850] env[63241]: DEBUG nova.compute.manager [req-a31ad256-f1f1-4896-8b98-9beaa58a2ecd req-53296a82-7736-4337-89e0-bb9c891e0658 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Refreshing instance network info cache due to event network-changed-47ef8579-84c9-45a0-bec4-9baf8c6792e7. {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2528.089075] env[63241]: DEBUG oslo_concurrency.lockutils [req-a31ad256-f1f1-4896-8b98-9beaa58a2ecd req-53296a82-7736-4337-89e0-bb9c891e0658 service nova] Acquiring lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2528.089224] env[63241]: DEBUG oslo_concurrency.lockutils [req-a31ad256-f1f1-4896-8b98-9beaa58a2ecd req-53296a82-7736-4337-89e0-bb9c891e0658 service nova] Acquired lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2528.089386] env[63241]: DEBUG nova.network.neutron [req-a31ad256-f1f1-4896-8b98-9beaa58a2ecd req-53296a82-7736-4337-89e0-bb9c891e0658 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Refreshing network info cache for port 47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2528.474096] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2528.474096] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Processing image d0f99cfb-21b1-4f33-adae-dc976f0ca7f7 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2528.474096] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7.vmdk" {{(pid=63241) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2528.474096] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2528.474096] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2528.474500] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a25ab842-1cf3-413a-827d-8e99477ec13f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.482310] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2528.482478] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63241) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2528.483152] env[63241]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de6504e4-d507-4a66-9349-91a11a20be31 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.487754] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2528.487754] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]529faab6-55b4-4fe0-ad80-f2bfcb7330ca" [ 2528.487754] env[63241]: _type = "Task" [ 2528.487754] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2528.494726] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': session[52622af6-969a-a161-ff87-4f4559b12465]529faab6-55b4-4fe0-ad80-f2bfcb7330ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2528.776731] env[63241]: DEBUG nova.network.neutron [req-a31ad256-f1f1-4896-8b98-9beaa58a2ecd req-53296a82-7736-4337-89e0-bb9c891e0658 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updated VIF entry in instance network info cache for port 47ef8579-84c9-45a0-bec4-9baf8c6792e7. 
{{(pid=63241) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2528.777153] env[63241]: DEBUG nova.network.neutron [req-a31ad256-f1f1-4896-8b98-9beaa58a2ecd req-53296a82-7736-4337-89e0-bb9c891e0658 service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [{"id": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "address": "fa:16:3e:8d:8a:6f", "network": {"id": "61259ca6-b9bb-4563-8f00-d69fc8740584", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-233100751-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39ea2ef9af4742768fc75e7a839b0416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ef8579-84", "ovs_interfaceid": "47ef8579-84c9-45a0-bec4-9baf8c6792e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2528.997868] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Preparing fetch location {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2528.998108] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Fetch image to [datastore1] OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645/OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645.vmdk {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2528.998299] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Downloading stream optimized image d0f99cfb-21b1-4f33-adae-dc976f0ca7f7 to [datastore1] OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645/OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645.vmdk on the data store datastore1 as vApp {{(pid=63241) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2528.998471] env[63241]: DEBUG nova.virt.vmwareapi.images [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Downloading image file data d0f99cfb-21b1-4f33-adae-dc976f0ca7f7 to the ESX as VM named 'OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645' {{(pid=63241) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2529.062147] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2529.062147] env[63241]: value = "resgroup-9" [ 2529.062147] env[63241]: _type = "ResourcePool" [ 2529.062147] env[63241]: }. {{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2529.062417] env[63241]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-50a8e78b-f42b-4474-9e8f-142c2148d40a {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.083954] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease: (returnval){ [ 2529.083954] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ab3f7b-7511-1616-1cc6-1d67a073fbed" [ 2529.083954] env[63241]: _type = "HttpNfcLease" [ 2529.083954] env[63241]: } obtained for vApp import into resource pool (val){ [ 2529.083954] env[63241]: value = "resgroup-9" [ 2529.083954] env[63241]: _type = "ResourcePool" [ 2529.083954] env[63241]: }. {{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2529.084230] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the lease: (returnval){ [ 2529.084230] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ab3f7b-7511-1616-1cc6-1d67a073fbed" [ 2529.084230] env[63241]: _type = "HttpNfcLease" [ 2529.084230] env[63241]: } to be ready. {{(pid=63241) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2529.090111] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2529.090111] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ab3f7b-7511-1616-1cc6-1d67a073fbed" [ 2529.090111] env[63241]: _type = "HttpNfcLease" [ 2529.090111] env[63241]: } is initializing. {{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2529.279710] env[63241]: DEBUG oslo_concurrency.lockutils [req-a31ad256-f1f1-4896-8b98-9beaa58a2ecd req-53296a82-7736-4337-89e0-bb9c891e0658 service nova] Releasing lock "refresh_cache-ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2529.592264] env[63241]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2529.592264] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ab3f7b-7511-1616-1cc6-1d67a073fbed" [ 2529.592264] env[63241]: _type = "HttpNfcLease" [ 2529.592264] env[63241]: } is ready. 
{{(pid=63241) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2529.592672] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2529.592672] env[63241]: value = "session[52622af6-969a-a161-ff87-4f4559b12465]52ab3f7b-7511-1616-1cc6-1d67a073fbed" [ 2529.592672] env[63241]: _type = "HttpNfcLease" [ 2529.592672] env[63241]: }. {{(pid=63241) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2529.593329] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e44775-b6e6-48d8-908f-b705fb817f1f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.600251] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5277becf-c244-e47c-d01b-30ddb478df7f/disk-0.vmdk from lease info. {{(pid=63241) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2529.600424] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5277becf-c244-e47c-d01b-30ddb478df7f/disk-0.vmdk. {{(pid=63241) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2529.662266] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2e4c0def-0759-4b93-84e5-dfbbf6d1330d {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2530.727328] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Completed reading data from the image iterator. {{(pid=63241) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2530.727600] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5277becf-c244-e47c-d01b-30ddb478df7f/disk-0.vmdk. 
{{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2530.728627] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079a9572-880d-4895-9dcd-8af7db45c58c {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2530.735964] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5277becf-c244-e47c-d01b-30ddb478df7f/disk-0.vmdk is in state: ready. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2530.736193] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5277becf-c244-e47c-d01b-30ddb478df7f/disk-0.vmdk. {{(pid=63241) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2530.736425] env[63241]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-98818ca5-25fe-49d8-8999-dcd23e435221 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2530.920215] env[63241]: DEBUG oslo_vmware.rw_handles [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5277becf-c244-e47c-d01b-30ddb478df7f/disk-0.vmdk. 
{{(pid=63241) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2530.920456] env[63241]: INFO nova.virt.vmwareapi.images [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Downloaded image file data d0f99cfb-21b1-4f33-adae-dc976f0ca7f7 [ 2530.921314] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bcebb5-5d98-4ee0-9589-3a017e90db47 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2530.936260] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-560b08bb-06d0-49c3-826a-019c9ca240e1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2530.962194] env[63241]: INFO nova.virt.vmwareapi.images [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] The imported VM was unregistered [ 2530.964415] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Caching image {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2530.964681] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Creating directory with path [datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2530.964953] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3188bb85-ca6e-43c1-942f-fb5cc339f6dd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2530.998783] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Created directory with path [datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7 {{(pid=63241) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2530.998930] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645/OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645.vmdk to [datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7.vmdk. 
{{(pid=63241) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2530.999181] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-444dc7fb-fe14-4ea4-97eb-a673e15740b6 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2531.005615] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2531.005615] env[63241]: value = "task-1821535" [ 2531.005615] env[63241]: _type = "Task" [ 2531.005615] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2531.012779] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821535, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2531.517963] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821535, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2532.018625] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821535, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2532.519712] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821535, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2533.020327] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821535, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2533.520146] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821535, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.345204} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2533.520432] env[63241]: INFO nova.virt.vmwareapi.ds_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645/OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645.vmdk to [datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7.vmdk. 
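For orientation, the MoveVirtualDisk_Task sequence just above (progress 0% → 24% → 46% → 69% → 91% → completed in ~2.35 s) is the standard oslo.vmware task pattern that this log keeps repeating: invoke a VIM method through the session to get a Task managed-object reference, then let the session poll it until vCenter reports success or error. Below is a minimal sketch of driving that same call outside Nova, not Nova's actual implementation (the log shows Nova doing this inside nova/virt/vmwareapi/ds_util.py's disk_move); the host, credentials, and datastore paths are placeholders, and the datacenter moref is assumed to be obtained elsewhere.

```python
# Sketch only: isolates the invoke-then-wait pattern visible in the log above.
# Assumes the oslo.vmware library and placeholder vCenter credentials.
from oslo_vmware import api


def move_image_into_cache(session, dc_ref, src_path, dst_path):
    """Move a VMDK between datastore paths and block until the task finishes.

    :param session: an oslo_vmware.api.VMwareAPISession
    :param dc_ref: datacenter managed object reference (assumed provided,
                   e.g. from a prior property-collector query)
    :param src_path: source datastore path, e.g. '[datastore1] tmp_dir/image.vmdk'
    :param dst_path: destination path, e.g. '[datastore1] image-cache/<id>/<id>.vmdk'
    """
    disk_mgr = session.vim.service_content.virtualDiskManager
    # Returns a Task moref; vCenter performs the move asynchronously.
    task = session.invoke_api(session.vim, 'MoveVirtualDisk_Task', disk_mgr,
                              sourceName=src_path, sourceDatacenter=dc_ref,
                              destName=dst_path, destDatacenter=dc_ref,
                              force=False)
    # wait_for_task polls the task state (the "progress is N%" lines above)
    # and raises if the task ends in an error state.
    return session.wait_for_task(task)


# Placeholder connection values; a real deployment reads these from nova.conf.
# Positional args: host, username, password, api_retry_count, task_poll_interval.
session = api.VMwareAPISession('vc.example.test', 'administrator@vsphere.local',
                               'secret', 10, 0.5)
```

The sketch only isolates the invoke/wait pattern; in the log, the same pattern also drives CreateVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task, Rename_Task, and the power-on/off tasks that follow.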
[ 2533.520619] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Cleaning up location [datastore1] OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645 {{(pid=63241) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2533.520785] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b1e3971f-fe16-4d76-8a0d-cfd751d85645 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2533.521047] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e6987c1-fa05-4b50-8939-57783bdfa9f7 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2533.527208] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2533.527208] env[63241]: value = "task-1821536" [ 2533.527208] env[63241]: _type = "Task" [ 2533.527208] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2533.534236] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2534.037239] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.051083} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2534.037598] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2534.037714] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7.vmdk" {{(pid=63241) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2534.037927] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7.vmdk to [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7/ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2534.038196] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4706e841-40b1-41fd-9d94-5def1d38f548 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2534.044684] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2534.044684] env[63241]: value = "task-1821537" [ 2534.044684] env[63241]: _type = "Task" [ 2534.044684] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2534.052079] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2534.557779] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821537, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2535.055904] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821537, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2535.558848] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821537, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2536.059072] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821537, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2536.557540] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821537, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.232819} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2536.557789] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7/d0f99cfb-21b1-4f33-adae-dc976f0ca7f7.vmdk to [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7/ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7.vmdk {{(pid=63241) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2536.558550] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0af138-449a-4808-a045-a47299034688 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.579802] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7/ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2536.580075] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1329b29c-ed56-4b43-89ce-69d268ca73af {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2536.598835] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2536.598835] env[63241]: value = "task-1821538" [ 2536.598835] env[63241]: _type = "Task" [ 2536.598835] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2536.606469] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821538, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2537.108787] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821538, 'name': ReconfigVM_Task, 'duration_secs': 0.251914} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2537.109084] env[63241]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Reconfigured VM instance instance-0000007d to attach disk [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7/ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7.vmdk or device None with type streamOptimized {{(pid=63241) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2537.109732] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd81cf54-0b5d-49a5-830a-10bf3898abc5 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2537.115582] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2537.115582] env[63241]: value = "task-1821539" [ 2537.115582] env[63241]: _type = "Task" [ 2537.115582] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2537.122950] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821539, 'name': Rename_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2537.154582] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2537.625363] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821539, 'name': Rename_Task} progress is 99%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2538.126483] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821539, 'name': Rename_Task} progress is 99%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2538.451493] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2538.626989] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821539, 'name': Rename_Task, 'duration_secs': 1.133555} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2538.627149] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Powering on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2538.627353] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f217ae8a-4ca3-4757-a792-e1f33c47f3a3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2538.633515] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2538.633515] env[63241]: value = "task-1821540" [ 2538.633515] env[63241]: _type = "Task" [ 2538.633515] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2538.640938] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821540, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2539.143962] env[63241]: DEBUG oslo_vmware.api [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821540, 'name': PowerOnVM_Task, 'duration_secs': 0.434911} completed successfully. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2539.144255] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Powered on the VM {{(pid=63241) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2539.307753] env[63241]: DEBUG nova.compute.manager [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Checking state {{(pid=63241) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2539.308756] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec164c7-d715-4837-b666-a48a08ccdcb1 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2539.451398] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2539.451589] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63241) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2539.827255] env[63241]: DEBUG oslo_concurrency.lockutils [None req-2c987595-dc0b-4c68-a02e-145510dceeb5 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.404s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2541.002610] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2541.002971] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2541.003162] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2541.003355] env[63241]: DEBUG oslo_concurrency.lockutils [None 
req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2541.003530] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2541.006038] env[63241]: INFO nova.compute.manager [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Terminating instance [ 2541.007737] env[63241]: DEBUG nova.compute.manager [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Start destroying the instance on the hypervisor. {{(pid=63241) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2541.007973] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Destroying instance {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2541.008780] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7613510b-884a-4ee4-99f6-cbc2db284251 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2541.016340] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Powering off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2541.016828] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebbde989-b6d3-4e6a-b9ca-47bbd52bdcf0 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2541.023094] env[63241]: DEBUG oslo_vmware.api [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2541.023094] env[63241]: value = "task-1821541" [ 2541.023094] env[63241]: _type = "Task" [ 2541.023094] env[63241]: } to complete. {{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2541.030579] env[63241]: DEBUG oslo_vmware.api [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821541, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2541.533204] env[63241]: DEBUG oslo_vmware.api [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821541, 'name': PowerOffVM_Task, 'duration_secs': 0.16686} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2541.533466] env[63241]: DEBUG nova.virt.vmwareapi.vm_util [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Powered off the VM {{(pid=63241) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2541.533634] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Unregistering the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2541.533870] env[63241]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69aee9c0-4699-4c4d-8648-4e58a85c6a8b {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2541.602565] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Unregistered the VM {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2541.602800] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Deleting contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2541.602986] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleting the datastore file [datastore1] ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2541.603266] env[63241]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa8a81cc-df79-44b6-87cb-4be7d533d3de {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2541.609469] env[63241]: DEBUG oslo_vmware.api [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for the task: (returnval){ [ 2541.609469] env[63241]: value = "task-1821543" [ 2541.609469] env[63241]: _type = "Task" [ 2541.609469] env[63241]: } to complete. 
{{(pid=63241) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2541.616802] env[63241]: DEBUG oslo_vmware.api [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821543, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2542.118821] env[63241]: DEBUG oslo_vmware.api [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Task: {'id': task-1821543, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123606} completed successfully. {{(pid=63241) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2542.119198] env[63241]: DEBUG nova.virt.vmwareapi.ds_util [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted the datastore file {{(pid=63241) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2542.119325] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Deleted contents of the VM from datastore datastore1 {{(pid=63241) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2542.119457] env[63241]: DEBUG nova.virt.vmwareapi.vmops [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Instance destroyed {{(pid=63241) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2542.119631] env[63241]: INFO nova.compute.manager [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2542.119896] env[63241]: DEBUG oslo.service.loopingcall [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63241) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2542.120101] env[63241]: DEBUG nova.compute.manager [-] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Deallocating network for instance {{(pid=63241) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2542.120197] env[63241]: DEBUG nova.network.neutron [-] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] deallocate_for_instance() {{(pid=63241) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2542.451587] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2542.539422] env[63241]: DEBUG nova.compute.manager [req-ebd46a6d-80bf-41f2-941f-756d9ac449a1 req-7b7b11fc-b3b4-473c-a493-a56031a822dd service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Received event network-vif-deleted-47ef8579-84c9-45a0-bec4-9baf8c6792e7 {{(pid=63241) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2542.539644] env[63241]: INFO nova.compute.manager [req-ebd46a6d-80bf-41f2-941f-756d9ac449a1 req-7b7b11fc-b3b4-473c-a493-a56031a822dd service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Neutron deleted interface 47ef8579-84c9-45a0-bec4-9baf8c6792e7; detaching it from the instance and deleting it from the info cache [ 2542.539829] env[63241]: DEBUG nova.network.neutron [req-ebd46a6d-80bf-41f2-941f-756d9ac449a1 req-7b7b11fc-b3b4-473c-a493-a56031a822dd service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2543.013436] env[63241]: DEBUG nova.network.neutron [-] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Updating instance_info_cache with network_info: [] {{(pid=63241) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2543.042836] env[63241]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-14ed15cd-7998-4c3b-aa24-242dfd3a54bd {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.053765] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ab0eae-bb07-4030-95f1-462a5ca27a1f {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2543.077553] env[63241]: DEBUG nova.compute.manager [req-ebd46a6d-80bf-41f2-941f-756d9ac449a1 req-7b7b11fc-b3b4-473c-a493-a56031a822dd service nova] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Detach interface failed, port_id=47ef8579-84c9-45a0-bec4-9baf8c6792e7, reason: Instance ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 could not be found. 
{{(pid=63241) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2543.452175] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2543.452483] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Starting heal instance info cache {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2543.452483] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Rebuilding the list of instances to heal {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2543.517938] env[63241]: INFO nova.compute.manager [-] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Took 1.40 seconds to deallocate network for instance. [ 2543.955489] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] [instance: ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7] Skipping network cache update for instance because it is being deleted. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 2543.955657] env[63241]: DEBUG nova.compute.manager [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Didn't find any instances for network info cache update. {{(pid=63241) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2544.024099] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2544.024359] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2544.024577] env[63241]: DEBUG nova.objects.instance [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lazy-loading 'resources' on Instance uuid ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 {{(pid=63241) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2544.558999] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33aa39ac-bddf-4d52-81e0-9fab2e736288 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2544.566356] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea308ec1-5c52-4525-885e-76b52a9eb9eb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2544.595110] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e95900-fdaa-4b2d-8def-346e9a8a234e {{(pid=63241) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2544.601387] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d8213b-545e-4e1e-9e87-0830ebca741e {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2544.614098] env[63241]: DEBUG nova.compute.provider_tree [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2545.133184] env[63241]: ERROR nova.scheduler.client.report [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] [req-dd4cc552-f7f8-4dcf-918f-d270a089b922] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dd4cc552-f7f8-4dcf-918f-d270a089b922"}]} [ 2545.148577] env[63241]: DEBUG nova.scheduler.client.report [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Refreshing inventories for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2545.160498] env[63241]: DEBUG nova.scheduler.client.report [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating ProviderTree inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2545.160701] env[63241]: DEBUG nova.compute.provider_tree [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2545.170326] env[63241]: DEBUG nova.scheduler.client.report [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Refreshing aggregate associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, aggregates: None {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2545.189390] env[63241]: DEBUG nova.scheduler.client.report [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Refreshing trait associations for resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63241) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2545.211382] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841f751f-7177-4899-956c-5c27c98b57bb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2545.218646] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-de98bc12-b227-4cff-b382-46fbb640db44 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2545.248606] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d123dd0-8842-4dab-93ba-166ec9b6a078 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2545.255301] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ac3765-9775-47f3-99ec-87c5509c9116 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2545.267842] env[63241]: DEBUG nova.compute.provider_tree [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2545.451894] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2545.798021] env[63241]: DEBUG nova.scheduler.client.report [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updated inventory for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with generation 204 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2545.799143] env[63241]: DEBUG nova.compute.provider_tree [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating resource provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b generation from 204 to 205 during operation: update_inventory {{(pid=63241) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2545.799143] env[63241]: DEBUG nova.compute.provider_tree [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Updating inventory in ProviderTree for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2546.303515] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.279s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2546.326295] env[63241]: INFO nova.scheduler.client.report [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Deleted allocations for instance ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7 [ 2546.834424] env[63241]: DEBUG oslo_concurrency.lockutils [None req-b862bfb5-2545-4392-b7ed-08acc044b8d8 tempest-ServerActionsTestOtherB-1384230803 tempest-ServerActionsTestOtherB-1384230803-project-member] Lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.831s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2547.451545] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2551.451516] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2552.447798] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2552.955284] env[63241]: DEBUG oslo_service.periodic_task [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63241) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2553.458287] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2553.458494] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2553.458664] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63241) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2553.458822] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63241) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2553.459745] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabc1a60-1034-4088-9d7e-5ee576bc1065 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2553.468356] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4e9ba7-69eb-46f4-9dc6-47fcffce9cb3 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2553.483337] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f746fb74-e22c-4911-bd62-a535c02dd7ed {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2553.489594] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9e7bef-4231-4452-9d8e-11f829e0e338 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2553.517061] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181060MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=63241) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2553.517205] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2553.517391] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2554.579158] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2554.579413] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63241) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2554.592308] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4c3c0a-37b6-4a63-a92d-b63a46b151ca {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2554.600448] env[63241]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902e842a-52cd-4425-9892-12f18eb6b447 {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2554.631481] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1a58f8-0fe8-4a15-be1e-2734d080edfa {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2554.638898] env[63241]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01754bb3-7e3c-4047-88d4-75a3ddf0d4fb {{(pid=63241) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2554.652057] env[63241]: DEBUG nova.compute.provider_tree [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed in ProviderTree for provider: 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b {{(pid=63241) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2555.155176] env[63241]: DEBUG nova.scheduler.client.report [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Inventory has not changed for provider 9a5e30eb-ceae-4224-aa66-dcbfa98ce24b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63241) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2555.659741] env[63241]: DEBUG nova.compute.resource_tracker [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63241) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2555.660089] env[63241]: DEBUG oslo_concurrency.lockutils [None req-40592b81-b94d-44db-b929-ff15cf704de7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.143s {{(pid=63241) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
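
The terminate path above is serialized on a lock named after the instance UUID (lock "ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7" acquired by do_terminate_instance and released 5.831s later). Below is a minimal sketch of that per-instance locking pattern; only oslo.concurrency is assumed, and the terminate_instance() wrapper and _shutdown() helper are hypothetical stand-ins, not Nova's real code.

# A minimal sketch of the per-instance lock pattern visible above.
from oslo_concurrency import lockutils


def _shutdown(instance_uuid):
    # Hypothetical placeholder for the power-off/unregister/delete work.
    print(f"shutting down {instance_uuid}")


def terminate_instance(instance_uuid):
    # The lock name is the instance UUID, so terminations of different
    # instances do not serialize against each other.
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        _shutdown(instance_uuid)

    do_terminate_instance()


if __name__ == "__main__":
    terminate_instance("ed2518cc-57fe-4e3a-bd93-ee493c2dd2c7")

The separate "<uuid>-events" lock in the same records is the same mechanism with a different lock name, which is why it can be acquired and released while the outer instance lock is still held.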
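PowerOffVM_Task and DeleteDatastoreFile_Task are both submitted and then polled until they report completion ("progress is 0%" followed by "completed successfully"). oslo.vmware's VMwareAPISession.wait_for_task wraps an equivalent loop; the sketch below shows the general shape only, with a hypothetical get_task_info() callable and invented state strings rather than the real vSphere bindings.

# A minimal sketch of the poll-until-done loop behind the
# "Waiting for the task ... progress is 0% ... completed successfully" lines.
import time


def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()  # hypothetical: {'state': ..., 'result': ...}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(f"task failed: {info.get('error')}")
        time.sleep(interval)
    raise TimeoutError("task did not complete within the timeout")


if __name__ == "__main__":
    # Fake task that is still running on the first poll and done on the second.
    states = iter([{"state": "running"}, {"state": "success", "result": "ok"}])
    print(wait_for_task(lambda: next(states), interval=0.1))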
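The ERROR at [ 2545.133184] is not fatal: the inventory PUT carried a stale resource provider generation, Placement answered 409 placement.concurrent_update, and the report client refreshed inventories, aggregates and traits before retrying, after which the update landed and the generation moved from 204 to 205. The sketch below illustrates that optimistic-concurrency retry; fetch_generation() and put_inventories() are hypothetical callables standing in for the real Placement HTTP requests.

# A sketch of the refresh-and-retry pattern around a 409 generation conflict.
def set_inventory_with_retry(fetch_generation, put_inventories,
                             inventory, max_retries=3):
    """Return True once the inventory PUT succeeds, False if retries run out."""
    for _ in range(max_retries):
        generation = fetch_generation()                  # e.g. 204
        status = put_inventories(generation, inventory)  # HTTP status code
        if status == 200:
            return True                                  # generation bumps to 205
        if status != 409:
            raise RuntimeError(f"inventory update failed: {status}")
        # 409: another writer bumped the generation first; refresh and retry.
    return False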
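For reference, the inventory dictionaries repeated throughout this section translate into schedulable capacity via (total - reserved) * allocation_ratio, which is the capacity formula Placement documents. The numbers below are copied from the log; the helper itself is just illustrative arithmetic, not Nova or Placement code.

# A small worked example of provider capacity from the logged inventory.
INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}


def capacity(inventory):
    return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
            for rc, v in inventory.items()}


if __name__ == "__main__":
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
    print(capacity(INVENTORY))

So the 48 host vCPUs advertise 192 schedulable VCPU at the 4.0 allocation ratio, while memory is offered 1:1 minus the 512 MB reservation; min_unit, max_unit and step_size from the same records constrain how a single allocation may be shaped but do not change the totals.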
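The interleaved "Running periodic task ComputeManager._..." lines (_poll_volume_usage, _heal_instance_info_cache, update_available_resource, ...) come from oslo.service's periodic task machinery ticking on its own interval while the terminate request is handled. The stdlib-only sketch below shows that kind of fixed-interval loop as an illustration of the pattern, not the real periodic_task implementation; _poll_volume_usage() here is a hypothetical stand-in.

# A stdlib-only sketch of a fixed-interval periodic task loop.
import time


def run_periodic(tasks, interval=60.0, iterations=None):
    """Run every callable in `tasks` once per interval."""
    count = 0
    while iterations is None or count < iterations:
        for task in tasks:
            try:
                task()
            except Exception as exc:
                # One failing task must not stop the whole loop.
                print(f"periodic task {task.__name__} failed: {exc}")
        count += 1
        time.sleep(interval)


def _poll_volume_usage():
    print("polling volume usage")


if __name__ == "__main__":
    run_periodic([_poll_volume_usage], interval=1.0, iterations=2)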